]>
Commit | Line | Data |
---|---|---|
230d793d | 1 | /* Optimize by combining instructions for GNU compiler. |
3c71940f | 2 | Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, |
d9221e01 | 3 | 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc. |
230d793d | 4 | |
1322177d | 5 | This file is part of GCC. |
230d793d | 6 | |
1322177d LB |
7 | GCC is free software; you can redistribute it and/or modify it under |
8 | the terms of the GNU General Public License as published by the Free | |
9 | Software Foundation; either version 2, or (at your option) any later | |
10 | version. | |
230d793d | 11 | |
1322177d LB |
12 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
13 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
14 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
15 | for more details. | |
230d793d RS |
16 | |
17 | You should have received a copy of the GNU General Public License | |
1322177d LB |
18 | along with GCC; see the file COPYING. If not, write to the Free |
19 | Software Foundation, 59 Temple Place - Suite 330, Boston, MA | |
20 | 02111-1307, USA. */ | |
230d793d | 21 | |
230d793d RS |
22 | /* This module is essentially the "combiner" phase of the U. of Arizona |
23 | Portable Optimizer, but redone to work on our list-structured | |
24 | representation for RTL instead of their string representation. | |
25 | ||
26 | The LOG_LINKS of each insn identify the most recent assignment | |
27 | to each REG used in the insn. It is a list of previous insns, | |
28 | each of which contains a SET for a REG that is used in this insn | |
29 | and not used or set in between. LOG_LINKs never cross basic blocks. | |
30 | They were set up by the preceding pass (lifetime analysis). | |
31 | ||
32 | We try to combine each pair of insns joined by a logical link. | |
33 | We also try to combine triples of insns A, B and C when | |
34 | C has a link back to B and B has a link back to A. | |
35 | ||
36 | LOG_LINKS does not have links for use of the CC0. They don't | |
37 | need to, because the insn that sets the CC0 is always immediately | |
38 | before the insn that tests it. So we always regard a branch | |
39 | insn as having a logical link to the preceding insn. The same is true | |
40 | for an insn explicitly using CC0. | |
41 | ||
42 | We check (with use_crosses_set_p) to avoid combining in such a way | |
43 | as to move a computation to a place where its value would be different. | |
44 | ||
45 | Combination is done by mathematically substituting the previous | |
46 | insn(s) values for the regs they set into the expressions in | |
47 | the later insns that refer to these regs. If the result is a valid insn | |
48 | for our target machine, according to the machine description, | |
49 | we install it, delete the earlier insns, and update the data flow | |
50 | information (LOG_LINKS and REG_NOTES) for what we did. | |
51 | ||
52 | There are a few exceptions where the dataflow information created by | |
53 | flow.c aren't completely updated: | |
54 | ||
55 | - reg_live_length is not updated | |
230d793d | 56 | - a LOG_LINKS entry that refers to an insn with multiple SETs may be |
663522cb | 57 | removed because there is no way to know which register it was |
230d793d RS |
58 | linking |
59 | ||
60 | To simplify substitution, we combine only when the earlier insn(s) | |
61 | consist of only a single assignment. To simplify updating afterward, | |
62 | we never combine when a subroutine call appears in the middle. | |
63 | ||
64 | Since we do not represent assignments to CC0 explicitly except when that | |
65 | is all an insn does, there is no LOG_LINKS entry in an insn that uses | |
66 | the condition code for the insn that set the condition code. | |
67 | Fortunately, these two insns must be consecutive. | |
68 | Therefore, every JUMP_INSN is taken to have an implicit logical link | |
69 | to the preceding insn. This is not quite right, since non-jumps can | |
70 | also use the condition code; but in practice such insns would not | |
71 | combine anyway. */ | |
72 | ||
230d793d | 73 | #include "config.h" |
670ee920 | 74 | #include "system.h" |
4977bab6 ZW |
75 | #include "coretypes.h" |
76 | #include "tm.h" | |
c5c76735 | 77 | #include "rtl.h" |
61f71b34 | 78 | #include "tree.h" |
a091679a | 79 | #include "tm_p.h" |
230d793d RS |
80 | #include "flags.h" |
81 | #include "regs.h" | |
55310dad | 82 | #include "hard-reg-set.h" |
230d793d RS |
83 | #include "basic-block.h" |
84 | #include "insn-config.h" | |
49ad7cfa | 85 | #include "function.h" |
ec5c56db | 86 | /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */ |
d6f4ec51 | 87 | #include "expr.h" |
230d793d RS |
88 | #include "insn-attr.h" |
89 | #include "recog.h" | |
90 | #include "real.h" | |
2e107e9e | 91 | #include "toplev.h" |
61f71b34 | 92 | #include "target.h" |
2f93eea8 | 93 | #include "rtlhooks-def.h" |
64b8935d RS |
94 | /* Include output.h for dump_file. */ |
95 | #include "output.h" | |
f73ad30e | 96 | |
230d793d RS |
97 | /* Number of attempts to combine instructions in this function. */ |
98 | ||
99 | static int combine_attempts; | |
100 | ||
101 | /* Number of attempts that got as far as substitution in this function. */ | |
102 | ||
103 | static int combine_merges; | |
104 | ||
105 | /* Number of instructions combined with added SETs in this function. */ | |
106 | ||
107 | static int combine_extras; | |
108 | ||
109 | /* Number of instructions combined in this function. */ | |
110 | ||
111 | static int combine_successes; | |
112 | ||
113 | /* Totals over entire compilation. */ | |
114 | ||
115 | static int total_attempts, total_merges, total_extras, total_successes; | |
9210df58 | 116 | |
230d793d RS |
117 | \f |
118 | /* Vector mapping INSN_UIDs to cuids. | |
5089e22e | 119 | The cuids are like uids but increase monotonically always. |
230d793d RS |
120 | Combine always uses cuids so that it can compare them. |
121 | But actually renumbering the uids, which we used to do, | |
122 | proves to be a bad idea because it makes it hard to compare | |
123 | the dumps produced by earlier passes with those from later passes. */ | |
124 | ||
125 | static int *uid_cuid; | |
4255220d | 126 | static int max_uid_cuid; |
230d793d RS |
127 | |
128 | /* Get the cuid of an insn. */ | |
129 | ||
1427d6d2 RK |
130 | #define INSN_CUID(INSN) \ |
131 | (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)]) | |
230d793d | 132 | |
42a6ff51 AO |
133 | /* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by |
134 | BITS_PER_WORD would invoke undefined behavior. Work around it. */ | |
135 | ||
136 | #define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \ | |
505ddab6 | 137 | (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1) |
42a6ff51 | 138 | |
230d793d RS |
139 | /* Maximum register number, which is the size of the tables below. */ |
140 | ||
770ae6cc | 141 | static unsigned int combine_max_regno; |
230d793d | 142 | |
5eaad481 PB |
143 | struct reg_stat { |
144 | /* Record last point of death of (hard or pseudo) register n. */ | |
145 | rtx last_death; | |
230d793d | 146 | |
5eaad481 PB |
147 | /* Record last point of modification of (hard or pseudo) register n. */ |
148 | rtx last_set; | |
230d793d | 149 | |
5eaad481 PB |
150 | /* The next group of fields allows the recording of the last value assigned |
151 | to (hard or pseudo) register n. We use this information to see if an | |
152 | operation being processed is redundant given a prior operation performed | |
153 | on the register. For example, an `and' with a constant is redundant if | |
154 | all the zero bits are already known to be turned off. | |
230d793d | 155 | |
5eaad481 PB |
156 | We use an approach similar to that used by cse, but change it in the |
157 | following ways: | |
158 | ||
159 | (1) We do not want to reinitialize at each label. | |
160 | (2) It is useful, but not critical, to know the actual value assigned | |
161 | to a register. Often just its form is helpful. | |
162 | ||
163 | Therefore, we maintain the following fields: | |
164 | ||
165 | last_set_value the last value assigned | |
166 | last_set_label records the value of label_tick when the | |
167 | register was assigned | |
168 | last_set_table_tick records the value of label_tick when a | |
169 | value using the register is assigned | |
170 | last_set_invalid set to nonzero when it is not valid | |
171 | to use the value of this register in some | |
172 | register's value | |
173 | ||
174 | To understand the usage of these tables, it is important to understand | |
175 | the distinction between the value in last_set_value being valid and | |
176 | the register being validly contained in some other expression in the | |
177 | table. | |
178 | ||
179 | (The next two parameters are out of date). | |
180 | ||
181 | reg_stat[i].last_set_value is valid if it is nonzero, and either | |
182 | reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick. | |
183 | ||
184 | Register I may validly appear in any expression returned for the value | |
185 | of another register if reg_n_sets[i] is 1. It may also appear in the | |
186 | value for register J if reg_stat[j].last_set_invalid is zero, or | |
187 | reg_stat[i].last_set_label < reg_stat[j].last_set_label. | |
188 | ||
189 | If an expression is found in the table containing a register which may | |
190 | not validly appear in an expression, the register is replaced by | |
191 | something that won't match, (clobber (const_int 0)). */ | |
192 | ||
193 | /* Record last value assigned to (hard or pseudo) register n. */ | |
194 | ||
195 | rtx last_set_value; | |
196 | ||
197 | /* Record the value of label_tick when an expression involving register n | |
198 | is placed in last_set_value. */ | |
199 | ||
200 | int last_set_table_tick; | |
201 | ||
202 | /* Record the value of label_tick when the value for register n is placed in | |
203 | last_set_value. */ | |
204 | ||
205 | int last_set_label; | |
206 | ||
207 | /* These fields are maintained in parallel with last_set_value and are | |
324a6c95 | 208 | used to store the mode in which the register was last set, the bits |
5eaad481 PB |
209 | that were known to be zero when it was last set, and the number of |
210 | sign bits copies it was known to have when it was last set. */ | |
211 | ||
212 | unsigned HOST_WIDE_INT last_set_nonzero_bits; | |
213 | char last_set_sign_bit_copies; | |
214 | ENUM_BITFIELD(machine_mode) last_set_mode : 8; | |
215 | ||
216 | /* Set nonzero if references to register n in expressions should not be | |
217 | used. last_set_invalid is set nonzero when this register is being | |
218 | assigned to and last_set_table_tick == label_tick. */ | |
219 | ||
220 | char last_set_invalid; | |
221 | ||
222 | /* Some registers that are set more than once and used in more than one | |
223 | basic block are nevertheless always set in similar ways. For example, | |
224 | a QImode register may be loaded from memory in two places on a machine | |
225 | where byte loads zero extend. | |
226 | ||
227 | We record in the following fields if a register has some leading bits | |
228 | that are always equal to the sign bit, and what we know about the | |
229 | nonzero bits of a register, specifically which bits are known to be | |
230 | zero. | |
231 | ||
232 | If an entry is zero, it means that we don't know anything special. */ | |
233 | ||
234 | unsigned char sign_bit_copies; | |
235 | ||
236 | unsigned HOST_WIDE_INT nonzero_bits; | |
237 | }; | |
238 | ||
239 | static struct reg_stat *reg_stat; | |
230d793d RS |
240 | |
241 | /* Record the cuid of the last insn that invalidated memory | |
242 | (anything that writes memory, and subroutine calls, but not pushes). */ | |
243 | ||
244 | static int mem_last_set; | |
245 | ||
246 | /* Record the cuid of the last CALL_INSN | |
247 | so we can tell whether a potential combination crosses any calls. */ | |
248 | ||
249 | static int last_call_cuid; | |
250 | ||
251 | /* When `subst' is called, this is the insn that is being modified | |
252 | (by combining in a previous insn). The PATTERN of this insn | |
253 | is still the old pattern partially modified and it should not be | |
254 | looked at, but this may be used to examine the successors of the insn | |
255 | to judge whether a simplification is valid. */ | |
256 | ||
257 | static rtx subst_insn; | |
258 | ||
259 | /* This is the lowest CUID that `subst' is currently dealing with. | |
260 | get_last_value will not return a value if the register was set at or | |
261 | after this CUID. If not for this mechanism, we could get confused if | |
262 | I2 or I1 in try_combine were an insn that used the old value of a register | |
263 | to obtain a new value. In that case, we might erroneously get the | |
264 | new value of the register when we wanted the old one. */ | |
265 | ||
266 | static int subst_low_cuid; | |
267 | ||
6e25d159 RK |
268 | /* This contains any hard registers that are used in newpat; reg_dead_at_p |
269 | must consider all these registers to be always live. */ | |
270 | ||
271 | static HARD_REG_SET newpat_used_regs; | |
272 | ||
abe6e52f RK |
273 | /* This is an insn to which a LOG_LINKS entry has been added. If this |
274 | insn is the earlier than I2 or I3, combine should rescan starting at | |
275 | that location. */ | |
276 | ||
277 | static rtx added_links_insn; | |
278 | ||
f6366fc7 ZD |
279 | /* Basic block in which we are performing combines. */ |
280 | static basic_block this_basic_block; | |
715e7fbc | 281 | |
663522cb KH |
282 | /* A bitmap indicating which blocks had registers go dead at entry. |
283 | After combine, we'll need to re-do global life analysis with | |
715e7fbc RH |
284 | those blocks as starting points. */ |
285 | static sbitmap refresh_blocks; | |
230d793d | 286 | \f |
6fd21094 | 287 | /* The following array records the insn_rtx_cost for every insn |
64b8935d RS |
288 | in the instruction stream. */ |
289 | ||
290 | static int *uid_insn_cost; | |
291 | ||
292 | /* Length of the currently allocated uid_insn_cost array. */ | |
293 | ||
294 | static int last_insn_cost; | |
295 | ||
0f41302f | 296 | /* Incremented for each label. */ |
230d793d | 297 | |
568356af | 298 | static int label_tick; |
230d793d | 299 | |
5eaad481 PB |
300 | /* Mode used to compute significance in reg_stat[].nonzero_bits. It is the |
301 | largest integer mode that can fit in HOST_BITS_PER_WIDE_INT. */ | |
230d793d | 302 | |
951553af | 303 | static enum machine_mode nonzero_bits_mode; |
230d793d | 304 | |
5eaad481 PB |
305 | /* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can |
306 | be safely used. It is zero while computing them and after combine has | |
307 | completed. This former test prevents propagating values based on | |
308 | previously set values, which can be incorrect if a variable is modified | |
309 | in a loop. */ | |
230d793d | 310 | |
951553af | 311 | static int nonzero_sign_valid; |
55310dad | 312 | |
230d793d RS |
313 | \f |
314 | /* Record one modification to rtl structure | |
315 | to be undone by storing old_contents into *where. | |
316 | is_int is 1 if the contents are an int. */ | |
317 | ||
318 | struct undo | |
319 | { | |
241cea85 | 320 | struct undo *next; |
230d793d | 321 | int is_int; |
3129af4c RS |
322 | union {rtx r; int i;} old_contents; |
323 | union {rtx *r; int *i;} where; | |
230d793d RS |
324 | }; |
325 | ||
326 | /* Record a bunch of changes to be undone, up to MAX_UNDO of them. | |
327 | num_undo says how many are currently recorded. | |
328 | ||
230d793d | 329 | other_insn is nonzero if we have modified some other insn in the process |
f1c6ba8b | 330 | of working on subst_insn. It must be verified too. */ |
230d793d RS |
331 | |
332 | struct undobuf | |
333 | { | |
241cea85 RK |
334 | struct undo *undos; |
335 | struct undo *frees; | |
230d793d RS |
336 | rtx other_insn; |
337 | }; | |
338 | ||
339 | static struct undobuf undobuf; | |
340 | ||
230d793d RS |
341 | /* Number of times the pseudo being substituted for |
342 | was found and replaced. */ | |
343 | ||
344 | static int n_occurrences; | |
345 | ||
2f93eea8 PB |
346 | static rtx reg_nonzero_bits_for_combine (rtx, enum machine_mode, rtx, |
347 | enum machine_mode, | |
348 | unsigned HOST_WIDE_INT, | |
349 | unsigned HOST_WIDE_INT *); | |
350 | static rtx reg_num_sign_bit_copies_for_combine (rtx, enum machine_mode, rtx, | |
351 | enum machine_mode, | |
352 | unsigned int, unsigned int *); | |
79a490a9 AJ |
353 | static void do_SUBST (rtx *, rtx); |
354 | static void do_SUBST_INT (int *, int); | |
5eaad481 | 355 | static void init_reg_last (void); |
79a490a9 AJ |
356 | static void setup_incoming_promotions (void); |
357 | static void set_nonzero_bits_and_sign_copies (rtx, rtx, void *); | |
358 | static int cant_combine_insn_p (rtx); | |
359 | static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *); | |
79a490a9 AJ |
360 | static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *); |
361 | static int contains_muldiv (rtx); | |
362 | static rtx try_combine (rtx, rtx, rtx, int *); | |
363 | static void undo_all (void); | |
364 | static void undo_commit (void); | |
365 | static rtx *find_split_point (rtx *, rtx); | |
366 | static rtx subst (rtx, rtx, rtx, int, int); | |
6621d78e | 367 | static rtx combine_simplify_rtx (rtx, enum machine_mode, int); |
79a490a9 AJ |
368 | static rtx simplify_if_then_else (rtx); |
369 | static rtx simplify_set (rtx); | |
6621d78e | 370 | static rtx simplify_logical (rtx); |
79a490a9 AJ |
371 | static rtx expand_compound_operation (rtx); |
372 | static rtx expand_field_assignment (rtx); | |
373 | static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT, | |
374 | rtx, unsigned HOST_WIDE_INT, int, int, int); | |
375 | static rtx extract_left_shift (rtx, int); | |
376 | static rtx make_compound_operation (rtx, enum rtx_code); | |
377 | static int get_pos_from_mask (unsigned HOST_WIDE_INT, | |
378 | unsigned HOST_WIDE_INT *); | |
379 | static rtx force_to_mode (rtx, enum machine_mode, | |
380 | unsigned HOST_WIDE_INT, rtx, int); | |
381 | static rtx if_then_else_cond (rtx, rtx *, rtx *); | |
382 | static rtx known_cond (rtx, enum rtx_code, rtx, rtx); | |
383 | static int rtx_equal_for_field_assignment_p (rtx, rtx); | |
384 | static rtx make_field_assignment (rtx); | |
385 | static rtx apply_distributive_law (rtx); | |
386 | static rtx simplify_and_const_int (rtx, enum machine_mode, rtx, | |
387 | unsigned HOST_WIDE_INT); | |
79a490a9 AJ |
388 | static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code, |
389 | HOST_WIDE_INT, enum machine_mode, int *); | |
390 | static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx, | |
391 | int); | |
392 | static int recog_for_combine (rtx *, rtx, rtx *); | |
393 | static rtx gen_lowpart_for_combine (enum machine_mode, rtx); | |
1999435c | 394 | static rtx gen_binary (enum rtx_code, enum machine_mode, rtx, rtx); |
79a490a9 AJ |
395 | static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *); |
396 | static void update_table_tick (rtx); | |
397 | static void record_value_for_reg (rtx, rtx, rtx); | |
398 | static void check_promoted_subreg (rtx, rtx); | |
399 | static void record_dead_and_set_regs_1 (rtx, rtx, void *); | |
400 | static void record_dead_and_set_regs (rtx); | |
401 | static int get_last_value_validate (rtx *, rtx, int, int); | |
402 | static rtx get_last_value (rtx); | |
403 | static int use_crosses_set_p (rtx, int); | |
404 | static void reg_dead_at_p_1 (rtx, rtx, void *); | |
405 | static int reg_dead_at_p (rtx, rtx); | |
406 | static void move_deaths (rtx, rtx, int, rtx, rtx *); | |
407 | static int reg_bitfield_target_p (rtx, rtx); | |
408 | static void distribute_notes (rtx, rtx, rtx, rtx); | |
409 | static void distribute_links (rtx); | |
410 | static void mark_used_regs_combine (rtx); | |
411 | static int insn_cuid (rtx); | |
412 | static void record_promoted_value (rtx, rtx); | |
413 | static rtx reversed_comparison (rtx, enum machine_mode, rtx, rtx); | |
414 | static enum rtx_code combine_reversed_comparison_code (rtx); | |
67962db5 RS |
415 | static int unmentioned_reg_p_1 (rtx *, void *); |
416 | static bool unmentioned_reg_p (rtx, rtx); | |
2f93eea8 PB |
417 | \f |
418 | ||
419 | /* It is not safe to use ordinary gen_lowpart in combine. | |
420 | See comments in gen_lowpart_for_combine. */ | |
421 | #undef RTL_HOOKS_GEN_LOWPART | |
422 | #define RTL_HOOKS_GEN_LOWPART gen_lowpart_for_combine | |
423 | ||
424 | #undef RTL_HOOKS_REG_NONZERO_REG_BITS | |
425 | #define RTL_HOOKS_REG_NONZERO_REG_BITS reg_nonzero_bits_for_combine | |
426 | ||
427 | #undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES | |
428 | #define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES reg_num_sign_bit_copies_for_combine | |
429 | ||
430 | static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER; | |
431 | ||
230d793d | 432 | \f |
76095e2f RH |
433 | /* Substitute NEWVAL, an rtx expression, into INTO, a place in some |
434 | insn. The substitution can be undone by undo_all. If INTO is already | |
435 | set to NEWVAL, do not record this change. Because computing NEWVAL might | |
436 | also call SUBST, we have to compute it before we put anything into | |
437 | the undo table. */ | |
438 | ||
439 | static void | |
79a490a9 | 440 | do_SUBST (rtx *into, rtx newval) |
76095e2f RH |
441 | { |
442 | struct undo *buf; | |
443 | rtx oldval = *into; | |
444 | ||
445 | if (oldval == newval) | |
446 | return; | |
447 | ||
4161da12 AO |
448 | /* We'd like to catch as many invalid transformations here as |
449 | possible. Unfortunately, there are way too many mode changes | |
450 | that are perfectly valid, so we'd waste too much effort for | |
451 | little gain doing the checks here. Focus on catching invalid | |
452 | transformations involving integer constants. */ | |
453 | if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT | |
454 | && GET_CODE (newval) == CONST_INT) | |
455 | { | |
456 | /* Sanity check that we're replacing oldval with a CONST_INT | |
457 | that is a valid sign-extension for the original mode. */ | |
341c100f NS |
458 | gcc_assert (INTVAL (newval) |
459 | == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval))); | |
4161da12 AO |
460 | |
461 | /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a | |
462 | CONST_INT is not valid, because after the replacement, the | |
463 | original mode would be gone. Unfortunately, we can't tell | |
464 | when do_SUBST is called to replace the operand thereof, so we | |
465 | perform this test on oldval instead, checking whether an | |
466 | invalid replacement took place before we got here. */ | |
341c100f NS |
467 | gcc_assert (!(GET_CODE (oldval) == SUBREG |
468 | && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)); | |
469 | gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND | |
470 | && GET_CODE (XEXP (oldval, 0)) == CONST_INT)); | |
e869aa39 | 471 | } |
4161da12 | 472 | |
76095e2f RH |
473 | if (undobuf.frees) |
474 | buf = undobuf.frees, undobuf.frees = buf->next; | |
475 | else | |
703ad42b | 476 | buf = xmalloc (sizeof (struct undo)); |
76095e2f RH |
477 | |
478 | buf->is_int = 0; | |
479 | buf->where.r = into; | |
480 | buf->old_contents.r = oldval; | |
481 | *into = newval; | |
482 | ||
483 | buf->next = undobuf.undos, undobuf.undos = buf; | |
484 | } | |
485 | ||
486 | #define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL)) | |
487 | ||
488 | /* Similar to SUBST, but NEWVAL is an int expression. Note that substitution | |
489 | for the value of a HOST_WIDE_INT value (including CONST_INT) is | |
490 | not safe. */ | |
491 | ||
492 | static void | |
79a490a9 | 493 | do_SUBST_INT (int *into, int newval) |
76095e2f RH |
494 | { |
495 | struct undo *buf; | |
3129af4c | 496 | int oldval = *into; |
76095e2f RH |
497 | |
498 | if (oldval == newval) | |
499 | return; | |
500 | ||
501 | if (undobuf.frees) | |
502 | buf = undobuf.frees, undobuf.frees = buf->next; | |
503 | else | |
703ad42b | 504 | buf = xmalloc (sizeof (struct undo)); |
76095e2f RH |
505 | |
506 | buf->is_int = 1; | |
507 | buf->where.i = into; | |
508 | buf->old_contents.i = oldval; | |
509 | *into = newval; | |
510 | ||
511 | buf->next = undobuf.undos, undobuf.undos = buf; | |
512 | } | |
513 | ||
514 | #define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL)) | |
515 | \f | |
64b8935d | 516 | /* Subroutine of try_combine. Determine whether the combine replacement |
6fd21094 | 517 | patterns NEWPAT and NEWI2PAT are cheaper according to insn_rtx_cost |
64b8935d RS |
518 | that the original instruction sequence I1, I2 and I3. Note that I1 |
519 | and/or NEWI2PAT may be NULL_RTX. This function returns false, if the | |
520 | costs of all instructions can be estimated, and the replacements are | |
521 | more expensive than the original sequence. */ | |
522 | ||
523 | static bool | |
524 | combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat) | |
525 | { | |
526 | int i1_cost, i2_cost, i3_cost; | |
527 | int new_i2_cost, new_i3_cost; | |
528 | int old_cost, new_cost; | |
529 | ||
6fd21094 | 530 | /* Lookup the original insn_rtx_costs. */ |
64b8935d RS |
531 | i2_cost = INSN_UID (i2) <= last_insn_cost |
532 | ? uid_insn_cost[INSN_UID (i2)] : 0; | |
533 | i3_cost = INSN_UID (i3) <= last_insn_cost | |
534 | ? uid_insn_cost[INSN_UID (i3)] : 0; | |
535 | ||
536 | if (i1) | |
537 | { | |
538 | i1_cost = INSN_UID (i1) <= last_insn_cost | |
539 | ? uid_insn_cost[INSN_UID (i1)] : 0; | |
540 | old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0) | |
541 | ? i1_cost + i2_cost + i3_cost : 0; | |
542 | } | |
543 | else | |
544 | { | |
545 | old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0; | |
546 | i1_cost = 0; | |
547 | } | |
548 | ||
6fd21094 RS |
549 | /* Calculate the replacement insn_rtx_costs. */ |
550 | new_i3_cost = insn_rtx_cost (newpat); | |
64b8935d RS |
551 | if (newi2pat) |
552 | { | |
6fd21094 | 553 | new_i2_cost = insn_rtx_cost (newi2pat); |
64b8935d RS |
554 | new_cost = (new_i2_cost > 0 && new_i3_cost > 0) |
555 | ? new_i2_cost + new_i3_cost : 0; | |
556 | } | |
557 | else | |
558 | { | |
559 | new_cost = new_i3_cost; | |
560 | new_i2_cost = 0; | |
561 | } | |
562 | ||
6bd26f0b ILT |
563 | if (undobuf.other_insn) |
564 | { | |
565 | int old_other_cost, new_other_cost; | |
566 | ||
567 | old_other_cost = (INSN_UID (undobuf.other_insn) <= last_insn_cost | |
568 | ? uid_insn_cost[INSN_UID (undobuf.other_insn)] : 0); | |
569 | new_other_cost = insn_rtx_cost (PATTERN (undobuf.other_insn)); | |
570 | if (old_other_cost > 0 && new_other_cost > 0) | |
571 | { | |
572 | old_cost += old_other_cost; | |
573 | new_cost += new_other_cost; | |
574 | } | |
575 | else | |
576 | old_cost = 0; | |
577 | } | |
578 | ||
64b8935d RS |
579 | /* Disallow this recombination if both new_cost and old_cost are |
580 | greater than zero, and new_cost is greater than old cost. */ | |
6bd26f0b | 581 | if (old_cost > 0 |
64b8935d RS |
582 | && new_cost > old_cost) |
583 | { | |
584 | if (dump_file) | |
585 | { | |
586 | if (i1) | |
587 | { | |
588 | fprintf (dump_file, | |
589 | "rejecting combination of insns %d, %d and %d\n", | |
590 | INSN_UID (i1), INSN_UID (i2), INSN_UID (i3)); | |
591 | fprintf (dump_file, "original costs %d + %d + %d = %d\n", | |
592 | i1_cost, i2_cost, i3_cost, old_cost); | |
593 | } | |
594 | else | |
595 | { | |
596 | fprintf (dump_file, | |
597 | "rejecting combination of insns %d and %d\n", | |
598 | INSN_UID (i2), INSN_UID (i3)); | |
599 | fprintf (dump_file, "original costs %d + %d = %d\n", | |
600 | i2_cost, i3_cost, old_cost); | |
601 | } | |
602 | ||
603 | if (newi2pat) | |
604 | { | |
605 | fprintf (dump_file, "replacement costs %d + %d = %d\n", | |
606 | new_i2_cost, new_i3_cost, new_cost); | |
607 | } | |
608 | else | |
609 | fprintf (dump_file, "replacement cost %d\n", new_cost); | |
610 | } | |
611 | ||
612 | return false; | |
613 | } | |
614 | ||
615 | /* Update the uid_insn_cost array with the replacement costs. */ | |
616 | uid_insn_cost[INSN_UID (i2)] = new_i2_cost; | |
617 | uid_insn_cost[INSN_UID (i3)] = new_i3_cost; | |
618 | if (i1) | |
619 | uid_insn_cost[INSN_UID (i1)] = 0; | |
620 | ||
621 | return true; | |
622 | } | |
623 | \f | |
230d793d | 624 | /* Main entry point for combiner. F is the first insn of the function. |
663522cb | 625 | NREGS is the first unused pseudo-reg number. |
230d793d | 626 | |
da7d8304 | 627 | Return nonzero if the combiner has turned an indirect jump |
44a76fc8 AG |
628 | instruction into a direct jump. */ |
629 | int | |
79a490a9 | 630 | combine_instructions (rtx f, unsigned int nregs) |
230d793d | 631 | { |
b3694847 | 632 | rtx insn, next; |
b729186a | 633 | #ifdef HAVE_cc0 |
b3694847 | 634 | rtx prev; |
b729186a | 635 | #endif |
b3694847 SS |
636 | int i; |
637 | rtx links, nextlinks; | |
230d793d | 638 | |
44a76fc8 AG |
639 | int new_direct_jump_p = 0; |
640 | ||
230d793d RS |
641 | combine_attempts = 0; |
642 | combine_merges = 0; | |
643 | combine_extras = 0; | |
644 | combine_successes = 0; | |
645 | ||
646 | combine_max_regno = nregs; | |
647 | ||
2f93eea8 | 648 | rtl_hooks = combine_rtl_hooks; |
4de249d9 | 649 | |
5eaad481 | 650 | reg_stat = xcalloc (nregs, sizeof (struct reg_stat)); |
230d793d RS |
651 | |
652 | init_recog_no_volatile (); | |
653 | ||
654 | /* Compute maximum uid value so uid_cuid can be allocated. */ | |
655 | ||
656 | for (insn = f, i = 0; insn; insn = NEXT_INSN (insn)) | |
657 | if (INSN_UID (insn) > i) | |
658 | i = INSN_UID (insn); | |
659 | ||
703ad42b | 660 | uid_cuid = xmalloc ((i + 1) * sizeof (int)); |
4255220d | 661 | max_uid_cuid = i; |
230d793d | 662 | |
951553af | 663 | nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0); |
230d793d | 664 | |
5eaad481 PB |
665 | /* Don't use reg_stat[].nonzero_bits when computing it. This can cause |
666 | problems when, for example, we have j <<= 1 in a loop. */ | |
230d793d | 667 | |
951553af | 668 | nonzero_sign_valid = 0; |
230d793d RS |
669 | |
670 | /* Compute the mapping from uids to cuids. | |
671 | Cuids are numbers assigned to insns, like uids, | |
663522cb | 672 | except that cuids increase monotonically through the code. |
230d793d RS |
673 | |
674 | Scan all SETs and see if we can deduce anything about what | |
951553af | 675 | bits are known to be zero for some registers and how many copies |
d79f08e0 RK |
676 | of the sign bit are known to exist for those registers. |
677 | ||
678 | Also set any known values so that we can use it while searching | |
679 | for what bits are known to be set. */ | |
680 | ||
681 | label_tick = 1; | |
230d793d | 682 | |
7988fd36 RK |
683 | setup_incoming_promotions (); |
684 | ||
d55bc081 | 685 | refresh_blocks = sbitmap_alloc (last_basic_block); |
715e7fbc | 686 | sbitmap_zero (refresh_blocks); |
715e7fbc | 687 | |
6fd21094 | 688 | /* Allocate array of current insn_rtx_costs. */ |
64b8935d RS |
689 | uid_insn_cost = xcalloc (max_uid_cuid + 1, sizeof (int)); |
690 | last_insn_cost = max_uid_cuid; | |
691 | ||
230d793d RS |
692 | for (insn = f, i = 0; insn; insn = NEXT_INSN (insn)) |
693 | { | |
4255220d | 694 | uid_cuid[INSN_UID (insn)] = ++i; |
d79f08e0 RK |
695 | subst_low_cuid = i; |
696 | subst_insn = insn; | |
697 | ||
2c3c49de | 698 | if (INSN_P (insn)) |
d79f08e0 | 699 | { |
663522cb | 700 | note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies, |
84832317 | 701 | NULL); |
d79f08e0 | 702 | record_dead_and_set_regs (insn); |
2dab894a RK |
703 | |
704 | #ifdef AUTO_INC_DEC | |
705 | for (links = REG_NOTES (insn); links; links = XEXP (links, 1)) | |
706 | if (REG_NOTE_KIND (links) == REG_INC) | |
84832317 MM |
707 | set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX, |
708 | NULL); | |
2dab894a | 709 | #endif |
64b8935d | 710 | |
6fd21094 RS |
711 | /* Record the current insn_rtx_cost of this instruction. */ |
712 | if (NONJUMP_INSN_P (insn)) | |
713 | uid_insn_cost[INSN_UID (insn)] = insn_rtx_cost (PATTERN (insn)); | |
64b8935d RS |
714 | if (dump_file) |
715 | fprintf(dump_file, "insn_cost %d: %d\n", | |
716 | INSN_UID (insn), uid_insn_cost[INSN_UID (insn)]); | |
d79f08e0 RK |
717 | } |
718 | ||
4b4bf941 | 719 | if (LABEL_P (insn)) |
d79f08e0 | 720 | label_tick++; |
230d793d RS |
721 | } |
722 | ||
951553af | 723 | nonzero_sign_valid = 1; |
230d793d RS |
724 | |
725 | /* Now scan all the insns in forward order. */ | |
726 | ||
727 | label_tick = 1; | |
728 | last_call_cuid = 0; | |
729 | mem_last_set = 0; | |
5eaad481 | 730 | init_reg_last (); |
7988fd36 RK |
731 | setup_incoming_promotions (); |
732 | ||
e0082a72 | 733 | FOR_EACH_BB (this_basic_block) |
230d793d | 734 | { |
a813c111 SB |
735 | for (insn = BB_HEAD (this_basic_block); |
736 | insn != NEXT_INSN (BB_END (this_basic_block)); | |
e0082a72 | 737 | insn = next ? next : NEXT_INSN (insn)) |
230d793d | 738 | { |
e0082a72 | 739 | next = 0; |
aabb6c74 | 740 | |
4b4bf941 | 741 | if (LABEL_P (insn)) |
e0082a72 | 742 | label_tick++; |
aabb6c74 | 743 | |
e0082a72 | 744 | else if (INSN_P (insn)) |
0b17ab2f | 745 | { |
e0082a72 ZD |
746 | /* See if we know about function return values before this |
747 | insn based upon SUBREG flags. */ | |
748 | check_promoted_subreg (insn, PATTERN (insn)); | |
230d793d | 749 | |
e0082a72 | 750 | /* Try this insn with each insn it links back to. */ |
230d793d | 751 | |
e0082a72 ZD |
752 | for (links = LOG_LINKS (insn); links; links = XEXP (links, 1)) |
753 | if ((next = try_combine (insn, XEXP (links, 0), | |
754 | NULL_RTX, &new_direct_jump_p)) != 0) | |
230d793d | 755 | goto retry; |
0b17ab2f | 756 | |
e0082a72 ZD |
757 | /* Try each sequence of three linked insns ending with this one. */ |
758 | ||
759 | for (links = LOG_LINKS (insn); links; links = XEXP (links, 1)) | |
760 | { | |
761 | rtx link = XEXP (links, 0); | |
762 | ||
763 | /* If the linked insn has been replaced by a note, then there | |
764 | is no point in pursuing this chain any further. */ | |
4b4bf941 | 765 | if (NOTE_P (link)) |
e0082a72 ZD |
766 | continue; |
767 | ||
768 | for (nextlinks = LOG_LINKS (link); | |
769 | nextlinks; | |
770 | nextlinks = XEXP (nextlinks, 1)) | |
771 | if ((next = try_combine (insn, link, | |
772 | XEXP (nextlinks, 0), | |
773 | &new_direct_jump_p)) != 0) | |
774 | goto retry; | |
775 | } | |
230d793d | 776 | |
9b89393b | 777 | #ifdef HAVE_cc0 |
e0082a72 ZD |
778 | /* Try to combine a jump insn that uses CC0 |
779 | with a preceding insn that sets CC0, and maybe with its | |
780 | logical predecessor as well. | |
781 | This is how we make decrement-and-branch insns. | |
782 | We need this special code because data flow connections | |
783 | via CC0 do not get entered in LOG_LINKS. */ | |
784 | ||
4b4bf941 | 785 | if (JUMP_P (insn) |
e0082a72 | 786 | && (prev = prev_nonnote_insn (insn)) != 0 |
4b4bf941 | 787 | && NONJUMP_INSN_P (prev) |
e0082a72 ZD |
788 | && sets_cc0_p (PATTERN (prev))) |
789 | { | |
790 | if ((next = try_combine (insn, prev, | |
791 | NULL_RTX, &new_direct_jump_p)) != 0) | |
792 | goto retry; | |
793 | ||
794 | for (nextlinks = LOG_LINKS (prev); nextlinks; | |
795 | nextlinks = XEXP (nextlinks, 1)) | |
796 | if ((next = try_combine (insn, prev, | |
797 | XEXP (nextlinks, 0), | |
798 | &new_direct_jump_p)) != 0) | |
799 | goto retry; | |
800 | } | |
230d793d | 801 | |
e0082a72 | 802 | /* Do the same for an insn that explicitly references CC0. */ |
4b4bf941 | 803 | if (NONJUMP_INSN_P (insn) |
e0082a72 | 804 | && (prev = prev_nonnote_insn (insn)) != 0 |
4b4bf941 | 805 | && NONJUMP_INSN_P (prev) |
e0082a72 ZD |
806 | && sets_cc0_p (PATTERN (prev)) |
807 | && GET_CODE (PATTERN (insn)) == SET | |
808 | && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn)))) | |
809 | { | |
810 | if ((next = try_combine (insn, prev, | |
811 | NULL_RTX, &new_direct_jump_p)) != 0) | |
812 | goto retry; | |
813 | ||
814 | for (nextlinks = LOG_LINKS (prev); nextlinks; | |
815 | nextlinks = XEXP (nextlinks, 1)) | |
816 | if ((next = try_combine (insn, prev, | |
817 | XEXP (nextlinks, 0), | |
818 | &new_direct_jump_p)) != 0) | |
819 | goto retry; | |
820 | } | |
230d793d | 821 | |
e0082a72 ZD |
822 | /* Finally, see if any of the insns that this insn links to |
823 | explicitly references CC0. If so, try this insn, that insn, | |
824 | and its predecessor if it sets CC0. */ | |
825 | for (links = LOG_LINKS (insn); links; links = XEXP (links, 1)) | |
4b4bf941 | 826 | if (NONJUMP_INSN_P (XEXP (links, 0)) |
e0082a72 ZD |
827 | && GET_CODE (PATTERN (XEXP (links, 0))) == SET |
828 | && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0)))) | |
829 | && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0 | |
4b4bf941 | 830 | && NONJUMP_INSN_P (prev) |
e0082a72 ZD |
831 | && sets_cc0_p (PATTERN (prev)) |
832 | && (next = try_combine (insn, XEXP (links, 0), | |
833 | prev, &new_direct_jump_p)) != 0) | |
834 | goto retry; | |
9b89393b | 835 | #endif |
e0082a72 ZD |
836 | |
837 | /* Try combining an insn with two different insns whose results it | |
838 | uses. */ | |
839 | for (links = LOG_LINKS (insn); links; links = XEXP (links, 1)) | |
840 | for (nextlinks = XEXP (links, 1); nextlinks; | |
841 | nextlinks = XEXP (nextlinks, 1)) | |
842 | if ((next = try_combine (insn, XEXP (links, 0), | |
843 | XEXP (nextlinks, 0), | |
844 | &new_direct_jump_p)) != 0) | |
845 | goto retry; | |
846 | ||
67962db5 RS |
847 | /* Try this insn with each REG_EQUAL note it links back to. */ |
848 | for (links = LOG_LINKS (insn); links; links = XEXP (links, 1)) | |
849 | { | |
850 | rtx set, note; | |
851 | rtx temp = XEXP (links, 0); | |
852 | if ((set = single_set (temp)) != 0 | |
853 | && (note = find_reg_equal_equiv_note (temp)) != 0 | |
854 | && GET_CODE (XEXP (note, 0)) != EXPR_LIST | |
855 | /* Avoid using a register that may already been marked | |
856 | dead by an earlier instruction. */ | |
857 | && ! unmentioned_reg_p (XEXP (note, 0), SET_SRC (set))) | |
858 | { | |
859 | /* Temporarily replace the set's source with the | |
860 | contents of the REG_EQUAL note. The insn will | |
861 | be deleted or recognized by try_combine. */ | |
862 | rtx orig = SET_SRC (set); | |
863 | SET_SRC (set) = XEXP (note, 0); | |
864 | next = try_combine (insn, temp, NULL_RTX, | |
865 | &new_direct_jump_p); | |
866 | if (next) | |
867 | goto retry; | |
868 | SET_SRC (set) = orig; | |
869 | } | |
870 | } | |
871 | ||
4b4bf941 | 872 | if (!NOTE_P (insn)) |
e0082a72 ZD |
873 | record_dead_and_set_regs (insn); |
874 | ||
875 | retry: | |
876 | ; | |
877 | } | |
230d793d RS |
878 | } |
879 | } | |
c51d95ec | 880 | clear_bb_flags (); |
230d793d | 881 | |
f6366fc7 ZD |
882 | EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, i, |
883 | BASIC_BLOCK (i)->flags |= BB_DIRTY); | |
c51d95ec | 884 | new_direct_jump_p |= purge_all_dead_edges (0); |
827c06b6 | 885 | delete_noop_moves (); |
0005550b | 886 | |
c51d95ec JH |
887 | update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES, |
888 | PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE | |
889 | | PROP_KILL_DEAD_CODE); | |
c05ddfa7 MM |
890 | |
891 | /* Clean up. */ | |
715e7fbc | 892 | sbitmap_free (refresh_blocks); |
64b8935d | 893 | free (uid_insn_cost); |
5eaad481 | 894 | free (reg_stat); |
c05ddfa7 | 895 | free (uid_cuid); |
715e7fbc | 896 | |
e7749837 RH |
897 | { |
898 | struct undo *undo, *next; | |
899 | for (undo = undobuf.frees; undo; undo = next) | |
900 | { | |
901 | next = undo->next; | |
902 | free (undo); | |
903 | } | |
904 | undobuf.frees = 0; | |
905 | } | |
906 | ||
230d793d RS |
907 | total_attempts += combine_attempts; |
908 | total_merges += combine_merges; | |
909 | total_extras += combine_extras; | |
910 | total_successes += combine_successes; | |
1a26b032 | 911 | |
951553af | 912 | nonzero_sign_valid = 0; |
2f93eea8 | 913 | rtl_hooks = general_rtl_hooks; |
972b320c R |
914 | |
915 | /* Make recognizer allow volatile MEMs again. */ | |
916 | init_recog (); | |
44a76fc8 AG |
917 | |
918 | return new_direct_jump_p; | |
230d793d | 919 | } |
ef026f91 | 920 | |
5eaad481 | 921 | /* Wipe the last_xxx fields of reg_stat in preparation for another pass. */ |
ef026f91 RS |
922 | |
923 | static void | |
5eaad481 | 924 | init_reg_last (void) |
ef026f91 | 925 | { |
5eaad481 PB |
926 | unsigned int i; |
927 | for (i = 0; i < combine_max_regno; i++) | |
928 | memset (reg_stat + i, 0, offsetof (struct reg_stat, sign_bit_copies)); | |
ef026f91 | 929 | } |
230d793d | 930 | \f |
7988fd36 RK |
931 | /* Set up any promoted values for incoming argument registers. */ |
932 | ||
ee791cc3 | 933 | static void |
79a490a9 | 934 | setup_incoming_promotions (void) |
7988fd36 | 935 | { |
770ae6cc | 936 | unsigned int regno; |
7988fd36 RK |
937 | rtx reg; |
938 | enum machine_mode mode; | |
939 | int unsignedp; | |
940 | rtx first = get_insns (); | |
941 | ||
61f71b34 DD |
942 | if (targetm.calls.promote_function_args (TREE_TYPE (cfun->decl))) |
943 | { | |
61f71b34 DD |
944 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
945 | /* Check whether this register can hold an incoming pointer | |
946 | argument. FUNCTION_ARG_REGNO_P tests outgoing register | |
947 | numbers, so translate if necessary due to register windows. */ | |
948 | if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno)) | |
949 | && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0) | |
950 | { | |
951 | record_value_for_reg | |
952 | (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND | |
953 | : SIGN_EXTEND), | |
954 | GET_MODE (reg), | |
955 | gen_rtx_CLOBBER (mode, const0_rtx))); | |
956 | } | |
957 | } | |
7988fd36 RK |
958 | } |
959 | \f | |
91102d5a RK |
960 | /* Called via note_stores. If X is a pseudo that is narrower than |
961 | HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero. | |
230d793d RS |
962 | |
963 | If we are setting only a portion of X and we can't figure out what | |
964 | portion, assume all bits will be used since we don't know what will | |
d0ab8cd3 RK |
965 | be happening. |
966 | ||
967 | Similarly, set how many bits of X are known to be copies of the sign bit | |
663522cb | 968 | at all locations in the function. This is the smallest number implied |
d0ab8cd3 | 969 | by any set of X. */ |
230d793d RS |
970 | |
971 | static void | |
79a490a9 AJ |
972 | set_nonzero_bits_and_sign_copies (rtx x, rtx set, |
973 | void *data ATTRIBUTE_UNUSED) | |
230d793d | 974 | { |
770ae6cc | 975 | unsigned int num; |
d0ab8cd3 | 976 | |
f8cfc6aa | 977 | if (REG_P (x) |
230d793d | 978 | && REGNO (x) >= FIRST_PSEUDO_REGISTER |
e8095e80 RK |
979 | /* If this register is undefined at the start of the file, we can't |
980 | say what its contents were. */ | |
f6366fc7 | 981 | && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, REGNO (x)) |
5f4f0e22 | 982 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT) |
230d793d | 983 | { |
2dab894a | 984 | if (set == 0 || GET_CODE (set) == CLOBBER) |
e8095e80 | 985 | { |
5eaad481 PB |
986 | reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x)); |
987 | reg_stat[REGNO (x)].sign_bit_copies = 1; | |
e8095e80 RK |
988 | return; |
989 | } | |
230d793d RS |
990 | |
991 | /* If this is a complex assignment, see if we can convert it into a | |
5089e22e | 992 | simple assignment. */ |
230d793d | 993 | set = expand_field_assignment (set); |
d79f08e0 RK |
994 | |
995 | /* If this is a simple assignment, or we have a paradoxical SUBREG, | |
996 | set what we know about X. */ | |
997 | ||
998 | if (SET_DEST (set) == x | |
999 | || (GET_CODE (SET_DEST (set)) == SUBREG | |
705c7b3b JW |
1000 | && (GET_MODE_SIZE (GET_MODE (SET_DEST (set))) |
1001 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set))))) | |
d79f08e0 | 1002 | && SUBREG_REG (SET_DEST (set)) == x)) |
d0ab8cd3 | 1003 | { |
9afa3d54 RK |
1004 | rtx src = SET_SRC (set); |
1005 | ||
1006 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
1007 | /* If X is narrower than a word and SRC is a non-negative | |
1008 | constant that would appear negative in the mode of X, | |
5eaad481 | 1009 | sign-extend it for use in reg_stat[].nonzero_bits because some |
9afa3d54 | 1010 | machines (maybe most) will actually do the sign-extension |
663522cb | 1011 | and this is the conservative approach. |
9afa3d54 RK |
1012 | |
1013 | ??? For 2.5, try to tighten up the MD files in this regard | |
1014 | instead of this kludge. */ | |
1015 | ||
1016 | if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD | |
1017 | && GET_CODE (src) == CONST_INT | |
1018 | && INTVAL (src) > 0 | |
1019 | && 0 != (INTVAL (src) | |
1020 | & ((HOST_WIDE_INT) 1 | |
9e69be8c | 1021 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
9afa3d54 RK |
1022 | src = GEN_INT (INTVAL (src) |
1023 | | ((HOST_WIDE_INT) (-1) | |
1024 | << GET_MODE_BITSIZE (GET_MODE (x)))); | |
1025 | #endif | |
1026 | ||
0a0440c9 | 1027 | /* Don't call nonzero_bits if it cannot change anything. */ |
5eaad481 PB |
1028 | if (reg_stat[REGNO (x)].nonzero_bits != ~(unsigned HOST_WIDE_INT) 0) |
1029 | reg_stat[REGNO (x)].nonzero_bits | |
0a0440c9 | 1030 | |= nonzero_bits (src, nonzero_bits_mode); |
d0ab8cd3 | 1031 | num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x)); |
5eaad481 PB |
1032 | if (reg_stat[REGNO (x)].sign_bit_copies == 0 |
1033 | || reg_stat[REGNO (x)].sign_bit_copies > num) | |
1034 | reg_stat[REGNO (x)].sign_bit_copies = num; | |
d0ab8cd3 | 1035 | } |
230d793d | 1036 | else |
d0ab8cd3 | 1037 | { |
5eaad481 PB |
1038 | reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x)); |
1039 | reg_stat[REGNO (x)].sign_bit_copies = 1; | |
d0ab8cd3 | 1040 | } |
230d793d RS |
1041 | } |
1042 | } | |
1043 | \f | |
1044 | /* See if INSN can be combined into I3. PRED and SUCC are optionally | |
1045 | insns that were previously combined into I3 or that will be combined | |
1046 | into the merger of INSN and I3. | |
1047 | ||
1048 | Return 0 if the combination is not allowed for any reason. | |
1049 | ||
663522cb | 1050 | If the combination is allowed, *PDEST will be set to the single |
230d793d RS |
1051 | destination of INSN and *PSRC to the single source, and this function |
1052 | will return 1. */ | |
1053 | ||
1054 | static int | |
79a490a9 AJ |
1055 | can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ, |
1056 | rtx *pdest, rtx *psrc) | |
230d793d RS |
1057 | { |
1058 | int i; | |
1059 | rtx set = 0, src, dest; | |
b729186a JL |
1060 | rtx p; |
1061 | #ifdef AUTO_INC_DEC | |
76d31c63 | 1062 | rtx link; |
b729186a | 1063 | #endif |
230d793d RS |
1064 | int all_adjacent = (succ ? (next_active_insn (insn) == succ |
1065 | && next_active_insn (succ) == i3) | |
1066 | : next_active_insn (insn) == i3); | |
1067 | ||
1068 | /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0. | |
663522cb | 1069 | or a PARALLEL consisting of such a SET and CLOBBERs. |
230d793d RS |
1070 | |
1071 | If INSN has CLOBBER parallel parts, ignore them for our processing. | |
1072 | By definition, these happen during the execution of the insn. When it | |
1073 | is merged with another insn, all bets are off. If they are, in fact, | |
1074 | needed and aren't also supplied in I3, they may be added by | |
663522cb | 1075 | recog_for_combine. Otherwise, it won't match. |
230d793d RS |
1076 | |
1077 | We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED | |
1078 | note. | |
1079 | ||
663522cb | 1080 | Get the source and destination of INSN. If more than one, can't |
230d793d | 1081 | combine. */ |
663522cb | 1082 | |
230d793d RS |
1083 | if (GET_CODE (PATTERN (insn)) == SET) |
1084 | set = PATTERN (insn); | |
1085 | else if (GET_CODE (PATTERN (insn)) == PARALLEL | |
1086 | && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET) | |
1087 | { | |
1088 | for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++) | |
1089 | { | |
1090 | rtx elt = XVECEXP (PATTERN (insn), 0, i); | |
da6fdad3 | 1091 | rtx note; |
230d793d RS |
1092 | |
1093 | switch (GET_CODE (elt)) | |
1094 | { | |
e3258cef R |
1095 | /* This is important to combine floating point insns |
1096 | for the SH4 port. */ | |
1097 | case USE: | |
1098 | /* Combining an isolated USE doesn't make sense. | |
d2604ae9 | 1099 | We depend here on combinable_i3pat to reject them. */ |
e3258cef R |
1100 | /* The code below this loop only verifies that the inputs of |
1101 | the SET in INSN do not change. We call reg_set_between_p | |
eaec9b3d | 1102 | to verify that the REG in the USE does not change between |
e3258cef R |
1103 | I3 and INSN. |
1104 | If the USE in INSN was for a pseudo register, the matching | |
1105 | insn pattern will likely match any register; combining this | |
1106 | with any other USE would only be safe if we knew that the | |
1107 | used registers have identical values, or if there was | |
1108 | something to tell them apart, e.g. different modes. For | |
eaec9b3d | 1109 | now, we forgo such complicated tests and simply disallow |
e3258cef | 1110 | combining of USES of pseudo registers with any other USE. */ |
f8cfc6aa | 1111 | if (REG_P (XEXP (elt, 0)) |
e3258cef R |
1112 | && GET_CODE (PATTERN (i3)) == PARALLEL) |
1113 | { | |
1114 | rtx i3pat = PATTERN (i3); | |
1115 | int i = XVECLEN (i3pat, 0) - 1; | |
770ae6cc RK |
1116 | unsigned int regno = REGNO (XEXP (elt, 0)); |
1117 | ||
e3258cef R |
1118 | do |
1119 | { | |
1120 | rtx i3elt = XVECEXP (i3pat, 0, i); | |
770ae6cc | 1121 | |
e3258cef | 1122 | if (GET_CODE (i3elt) == USE |
f8cfc6aa | 1123 | && REG_P (XEXP (i3elt, 0)) |
e3258cef R |
1124 | && (REGNO (XEXP (i3elt, 0)) == regno |
1125 | ? reg_set_between_p (XEXP (elt, 0), | |
1126 | PREV_INSN (insn), i3) | |
1127 | : regno >= FIRST_PSEUDO_REGISTER)) | |
1128 | return 0; | |
1129 | } | |
1130 | while (--i >= 0); | |
1131 | } | |
1132 | break; | |
1133 | ||
230d793d RS |
1134 | /* We can ignore CLOBBERs. */ |
1135 | case CLOBBER: | |
1136 | break; | |
1137 | ||
1138 | case SET: | |
1139 | /* Ignore SETs whose result isn't used but not those that | |
1140 | have side-effects. */ | |
1141 | if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt)) | |
da6fdad3 AM |
1142 | && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) |
1143 | || INTVAL (XEXP (note, 0)) <= 0) | |
230d793d RS |
1144 | && ! side_effects_p (elt)) |
1145 | break; | |
1146 | ||
1147 | /* If we have already found a SET, this is a second one and | |
1148 | so we cannot combine with this insn. */ | |
1149 | if (set) | |
1150 | return 0; | |
1151 | ||
1152 | set = elt; | |
1153 | break; | |
1154 | ||
1155 | default: | |
1156 | /* Anything else means we can't combine. */ | |
1157 | return 0; | |
1158 | } | |
1159 | } | |
1160 | ||
1161 | if (set == 0 | |
1162 | /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs, | |
1163 | so don't do anything with it. */ | |
1164 | || GET_CODE (SET_SRC (set)) == ASM_OPERANDS) | |
1165 | return 0; | |
1166 | } | |
1167 | else | |
1168 | return 0; | |
1169 | ||
1170 | if (set == 0) | |
1171 | return 0; | |
1172 | ||
1173 | set = expand_field_assignment (set); | |
1174 | src = SET_SRC (set), dest = SET_DEST (set); | |
1175 | ||
1176 | /* Don't eliminate a store in the stack pointer. */ | |
1177 | if (dest == stack_pointer_rtx | |
230d793d RS |
1178 | /* Don't combine with an insn that sets a register to itself if it has |
1179 | a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */ | |
5f4f0e22 | 1180 | || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX)) |
62f7f1f5 GK |
1181 | /* Can't merge an ASM_OPERANDS. */ |
1182 | || GET_CODE (src) == ASM_OPERANDS | |
230d793d RS |
1183 | /* Can't merge a function call. */ |
1184 | || GET_CODE (src) == CALL | |
cd5e8f1f | 1185 | /* Don't eliminate a function call argument. */ |
4b4bf941 | 1186 | || (CALL_P (i3) |
4dca5ec5 | 1187 | && (find_reg_fusage (i3, USE, dest) |
f8cfc6aa | 1188 | || (REG_P (dest) |
4dca5ec5 RK |
1189 | && REGNO (dest) < FIRST_PSEUDO_REGISTER |
1190 | && global_regs[REGNO (dest)]))) | |
230d793d RS |
1191 | /* Don't substitute into an incremented register. */ |
1192 | || FIND_REG_INC_NOTE (i3, dest) | |
1193 | || (succ && FIND_REG_INC_NOTE (succ, dest)) | |
2f39b6ca UW |
1194 | /* Don't substitute into a non-local goto, this confuses CFG. */ |
1195 | || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX)) | |
ec35104c | 1196 | #if 0 |
230d793d | 1197 | /* Don't combine the end of a libcall into anything. */ |
ec35104c JL |
1198 | /* ??? This gives worse code, and appears to be unnecessary, since no |
1199 | pass after flow uses REG_LIBCALL/REG_RETVAL notes. Local-alloc does | |
1200 | use REG_RETVAL notes for noconflict blocks, but other code here | |
1201 | makes sure that those insns don't disappear. */ | |
5f4f0e22 | 1202 | || find_reg_note (insn, REG_RETVAL, NULL_RTX) |
ec35104c | 1203 | #endif |
230d793d RS |
1204 | /* Make sure that DEST is not used after SUCC but before I3. */ |
1205 | || (succ && ! all_adjacent | |
1206 | && reg_used_between_p (dest, succ, i3)) | |
1207 | /* Make sure that the value that is to be substituted for the register | |
1208 | does not use any registers whose values alter in between. However, | |
1209 | If the insns are adjacent, a use can't cross a set even though we | |
1210 | think it might (this can happen for a sequence of insns each setting | |
5eaad481 | 1211 | the same destination; last_set of that register might point to |
d81481d3 RK |
1212 | a NOTE). If INSN has a REG_EQUIV note, the register is always |
1213 | equivalent to the memory so the substitution is valid even if there | |
1214 | are intervening stores. Also, don't move a volatile asm or | |
1215 | UNSPEC_VOLATILE across any other insns. */ | |
230d793d | 1216 | || (! all_adjacent |
3c0cb5de | 1217 | && (((!MEM_P (src) |
d81481d3 RK |
1218 | || ! find_reg_note (insn, REG_EQUIV, src)) |
1219 | && use_crosses_set_p (src, INSN_CUID (insn))) | |
a66a10c7 RS |
1220 | || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src)) |
1221 | || GET_CODE (src) == UNSPEC_VOLATILE)) | |
230d793d RS |
1222 | /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get |
1223 | better register allocation by not doing the combine. */ | |
1224 | || find_reg_note (i3, REG_NO_CONFLICT, dest) | |
1225 | || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest)) | |
1226 | /* Don't combine across a CALL_INSN, because that would possibly | |
1227 | change whether the life span of some REGs crosses calls or not, | |
1228 | and it is a pain to update that information. | |
1229 | Exception: if source is a constant, moving it later can't hurt. | |
1230 | Accept that special case, because it helps -fforce-addr a lot. */ | |
1231 | || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src))) | |
1232 | return 0; | |
1233 | ||
1234 | /* DEST must either be a REG or CC0. */ | |
f8cfc6aa | 1235 | if (REG_P (dest)) |
230d793d RS |
1236 | { |
1237 | /* If register alignment is being enforced for multi-word items in all | |
1238 | cases except for parameters, it is possible to have a register copy | |
1239 | insn referencing a hard register that is not allowed to contain the | |
1240 | mode being copied and which would not be valid as an operand of most | |
1241 | insns. Eliminate this problem by not combining with such an insn. | |
1242 | ||
1243 | Also, on some machines we don't want to extend the life of a hard | |
53895717 | 1244 | register. */ |
230d793d | 1245 | |
f8cfc6aa | 1246 | if (REG_P (src) |
230d793d RS |
1247 | && ((REGNO (dest) < FIRST_PSEUDO_REGISTER |
1248 | && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest))) | |
c448a43e RK |
1249 | /* Don't extend the life of a hard register unless it is |
1250 | user variable (if we have few registers) or it can't | |
1251 | fit into the desired register (meaning something special | |
ecd40809 RK |
1252 | is going on). |
1253 | Also avoid substituting a return register into I3, because | |
1254 | reload can't handle a conflict with constraints of other | |
1255 | inputs. */ | |
230d793d | 1256 | || (REGNO (src) < FIRST_PSEUDO_REGISTER |
53895717 | 1257 | && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))))) |
230d793d RS |
1258 | return 0; |
1259 | } | |
1260 | else if (GET_CODE (dest) != CC0) | |
1261 | return 0; | |
1262 | ||
45da19e3 | 1263 | |
230d793d RS |
1264 | if (GET_CODE (PATTERN (i3)) == PARALLEL) |
1265 | for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--) | |
45da19e3 UW |
1266 | if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER) |
1267 | { | |
1268 | /* Don't substitute for a register intended as a clobberable | |
8c27b7d4 | 1269 | operand. */ |
45da19e3 UW |
1270 | rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0); |
1271 | if (rtx_equal_p (reg, dest)) | |
1272 | return 0; | |
1273 | ||
1274 | /* If the clobber represents an earlyclobber operand, we must not | |
1275 | substitute an expression containing the clobbered register. | |
1276 | As we do not analyse the constraint strings here, we have to | |
1277 | make the conservative assumption. However, if the register is | |
1278 | a fixed hard reg, the clobber cannot represent any operand; | |
1279 | we leave it up to the machine description to either accept or | |
1280 | reject use-and-clobber patterns. */ | |
1281 | if (!REG_P (reg) | |
1282 | || REGNO (reg) >= FIRST_PSEUDO_REGISTER | |
1283 | || !fixed_regs[REGNO (reg)]) | |
1284 | if (reg_overlap_mentioned_p (reg, src)) | |
1285 | return 0; | |
1286 | } | |
230d793d RS |
1287 | |
1288 | /* If INSN contains anything volatile, or is an `asm' (whether volatile | |
d276f2bb | 1289 | or not), reject, unless nothing volatile comes between it and I3 */ |
230d793d RS |
1290 | |
1291 | if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src)) | |
d276f2bb CM |
1292 | { |
1293 | /* Make sure succ doesn't contain a volatile reference. */ | |
1294 | if (succ != 0 && volatile_refs_p (PATTERN (succ))) | |
1295 | return 0; | |
663522cb | 1296 | |
d276f2bb | 1297 | for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p)) |
2c3c49de | 1298 | if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p))) |
cf0d9408 | 1299 | return 0; |
d276f2bb | 1300 | } |
230d793d | 1301 | |
b79ee7eb RH |
1302 | /* If INSN is an asm, and DEST is a hard register, reject, since it has |
1303 | to be an explicit register variable, and was chosen for a reason. */ | |
1304 | ||
1305 | if (GET_CODE (src) == ASM_OPERANDS | |
f8cfc6aa | 1306 | && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER) |
b79ee7eb RH |
1307 | return 0; |
1308 | ||
4b2cb4a2 RS |
1309 | /* If there are any volatile insns between INSN and I3, reject, because |
1310 | they might affect machine state. */ | |
1311 | ||
1312 | for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p)) | |
2c3c49de | 1313 | if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p))) |
4b2cb4a2 RS |
1314 | return 0; |
1315 | ||
230d793d RS |
1316 | /* If INSN or I2 contains an autoincrement or autodecrement, |
1317 | make sure that register is not used between there and I3, | |
1318 | and not already used in I3 either. | |
1319 | Also insist that I3 not be a jump; if it were one | |
1320 | and the incremented register were spilled, we would lose. */ | |
1321 | ||
1322 | #ifdef AUTO_INC_DEC | |
1323 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
1324 | if (REG_NOTE_KIND (link) == REG_INC | |
4b4bf941 | 1325 | && (JUMP_P (i3) |
230d793d RS |
1326 | || reg_used_between_p (XEXP (link, 0), insn, i3) |
1327 | || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3)))) | |
1328 | return 0; | |
1329 | #endif | |
1330 | ||
1331 | #ifdef HAVE_cc0 | |
1332 | /* Don't combine an insn that follows a CC0-setting insn. | |
1333 | An insn that uses CC0 must not be separated from the one that sets it. | |
1334 | We do, however, allow I2 to follow a CC0-setting insn if that insn | |
1335 | is passed as I1; in that case it will be deleted also. | |
1336 | We also allow combining in this case if all the insns are adjacent | |
1337 | because that would leave the two CC0 insns adjacent as well. | |
1338 | It would be more logical to test whether CC0 occurs inside I1 or I2, | |
1339 | but that would be much slower, and this ought to be equivalent. */ | |
1340 | ||
1341 | p = prev_nonnote_insn (insn); | |
4b4bf941 | 1342 | if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p)) |
230d793d RS |
1343 | && ! all_adjacent) |
1344 | return 0; | |
1345 | #endif | |
1346 | ||
1347 | /* If we get here, we have passed all the tests and the combination is | |
1348 | to be allowed. */ | |
1349 | ||
1350 | *pdest = dest; | |
1351 | *psrc = src; | |
1352 | ||
1353 | return 1; | |
1354 | } | |
1355 | \f | |
1356 | /* LOC is the location within I3 that contains its pattern or the component | |
1357 | of a PARALLEL of the pattern. We validate that it is valid for combining. | |
1358 | ||
1359 | One problem is if I3 modifies its output, as opposed to replacing it | |
1360 | entirely, we can't allow the output to contain I2DEST or I1DEST as doing | |
1361 | so would produce an insn that is not equivalent to the original insns. | |
1362 | ||
1363 | Consider: | |
1364 | ||
1365 | (set (reg:DI 101) (reg:DI 100)) | |
1366 | (set (subreg:SI (reg:DI 101) 0) <foo>) | |
1367 | ||
1368 | This is NOT equivalent to: | |
1369 | ||
1370 | (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>) | |
23190837 | 1371 | (set (reg:DI 101) (reg:DI 100))]) |
230d793d RS |
1372 | |
1373 | Not only does this modify 100 (in which case it might still be valid | |
663522cb | 1374 | if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100. |
230d793d RS |
1375 | |
1376 | We can also run into a problem if I2 sets a register that I1 | |
1377 | uses and I1 gets directly substituted into I3 (not via I2). In that | |
1378 | case, we would be getting the wrong value of I2DEST into I3, so we | |
1379 | must reject the combination. This case occurs when I2 and I1 both | |
1380 | feed into I3, rather than when I1 feeds into I2, which feeds into I3. | |
da7d8304 | 1381 | If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source |
230d793d RS |
1382 | of a SET must prevent combination from occurring. |
1383 | ||
230d793d RS |
1384 | Before doing the above check, we first try to expand a field assignment |
1385 | into a set of logical operations. | |
1386 | ||
da7d8304 | 1387 | If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which |
230d793d RS |
1388 | we place a register that is both set and used within I3. If more than one |
1389 | such register is detected, we fail. | |
1390 | ||
1391 | Return 1 if the combination is valid, zero otherwise. */ | |
1392 | ||
1393 | static int | |
79a490a9 AJ |
1394 | combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest, |
1395 | int i1_not_in_src, rtx *pi3dest_killed) | |
230d793d RS |
1396 | { |
1397 | rtx x = *loc; | |
1398 | ||
1399 | if (GET_CODE (x) == SET) | |
1400 | { | |
73a39fc4 | 1401 | rtx set = x ; |
230d793d RS |
1402 | rtx dest = SET_DEST (set); |
1403 | rtx src = SET_SRC (set); | |
29a82058 | 1404 | rtx inner_dest = dest; |
663522cb | 1405 | |
230d793d RS |
1406 | while (GET_CODE (inner_dest) == STRICT_LOW_PART |
1407 | || GET_CODE (inner_dest) == SUBREG | |
1408 | || GET_CODE (inner_dest) == ZERO_EXTRACT) | |
1409 | inner_dest = XEXP (inner_dest, 0); | |
1410 | ||
0595d388 AO |
1411 | /* Check for the case where I3 modifies its output, as discussed |
1412 | above. We don't want to prevent pseudos from being combined | |
1413 | into the address of a MEM, so only prevent the combination if | |
1414 | i1 or i2 set the same MEM. */ | |
1415 | if ((inner_dest != dest && | |
3c0cb5de | 1416 | (!MEM_P (inner_dest) |
0595d388 AO |
1417 | || rtx_equal_p (i2dest, inner_dest) |
1418 | || (i1dest && rtx_equal_p (i1dest, inner_dest))) | |
230d793d RS |
1419 | && (reg_overlap_mentioned_p (i2dest, inner_dest) |
1420 | || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest)))) | |
956d6950 | 1421 | |
53895717 BS |
1422 | /* This is the same test done in can_combine_p except we can't test |
1423 | all_adjacent; we don't have to, since this instruction will stay | |
1424 | in place, thus we are not considering increasing the lifetime of | |
1425 | INNER_DEST. | |
956d6950 JL |
1426 | |
1427 | Also, if this insn sets a function argument, combining it with | |
1428 | something that might need a spill could clobber a previous | |
1429 | function argument; the all_adjacent test in can_combine_p also | |
1430 | checks this; here, we do a more specific test for this case. */ | |
663522cb | 1431 | |
f8cfc6aa | 1432 | || (REG_P (inner_dest) |
dfbe1b2f | 1433 | && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER |
c448a43e | 1434 | && (! HARD_REGNO_MODE_OK (REGNO (inner_dest), |
53895717 | 1435 | GET_MODE (inner_dest)))) |
230d793d RS |
1436 | || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src))) |
1437 | return 0; | |
1438 | ||
1439 | /* If DEST is used in I3, it is being killed in this insn, | |
663522cb | 1440 | so record that for later. |
36a9c2e9 JL |
1441 | Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the |
1442 | STACK_POINTER_REGNUM, since these are always considered to be | |
1443 | live. Similarly for ARG_POINTER_REGNUM if it is fixed. */ | |
f8cfc6aa | 1444 | if (pi3dest_killed && REG_P (dest) |
36a9c2e9 JL |
1445 | && reg_referenced_p (dest, PATTERN (i3)) |
1446 | && REGNO (dest) != FRAME_POINTER_REGNUM | |
6d7096b0 DE |
1447 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
1448 | && REGNO (dest) != HARD_FRAME_POINTER_REGNUM | |
1449 | #endif | |
36a9c2e9 JL |
1450 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM |
1451 | && (REGNO (dest) != ARG_POINTER_REGNUM | |
1452 | || ! fixed_regs [REGNO (dest)]) | |
1453 | #endif | |
1454 | && REGNO (dest) != STACK_POINTER_REGNUM) | |
230d793d RS |
1455 | { |
1456 | if (*pi3dest_killed) | |
1457 | return 0; | |
1458 | ||
1459 | *pi3dest_killed = dest; | |
1460 | } | |
1461 | } | |
1462 | ||
1463 | else if (GET_CODE (x) == PARALLEL) | |
1464 | { | |
1465 | int i; | |
1466 | ||
1467 | for (i = 0; i < XVECLEN (x, 0); i++) | |
1468 | if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest, | |
1469 | i1_not_in_src, pi3dest_killed)) | |
1470 | return 0; | |
1471 | } | |
1472 | ||
1473 | return 1; | |
1474 | } | |
1475 | \f | |
14a774a9 RK |
1476 | /* Return 1 if X is an arithmetic expression that contains a multiplication |
1477 | and division. We don't count multiplications by powers of two here. */ | |
1478 | ||
1479 | static int | |
79a490a9 | 1480 | contains_muldiv (rtx x) |
14a774a9 RK |
1481 | { |
1482 | switch (GET_CODE (x)) | |
1483 | { | |
1484 | case MOD: case DIV: case UMOD: case UDIV: | |
1485 | return 1; | |
1486 | ||
1487 | case MULT: | |
1488 | return ! (GET_CODE (XEXP (x, 1)) == CONST_INT | |
1489 | && exact_log2 (INTVAL (XEXP (x, 1))) >= 0); | |
1490 | default: | |
ec8e098d PB |
1491 | if (BINARY_P (x)) |
1492 | return contains_muldiv (XEXP (x, 0)) | |
14a774a9 RK |
1493 | || contains_muldiv (XEXP (x, 1)); |
1494 | ||
ec8e098d PB |
1495 | if (UNARY_P (x)) |
1496 | return contains_muldiv (XEXP (x, 0)); | |
14a774a9 | 1497 | |
ec8e098d | 1498 | return 0; |
14a774a9 RK |
1499 | } |
1500 | } | |
1501 | \f | |
c3410241 BS |
1502 | /* Determine whether INSN can be used in a combination. Return nonzero if |
1503 | not. This is used in try_combine to detect early some cases where we | |
1504 | can't perform combinations. */ | |
1505 | ||
1506 | static int | |
79a490a9 | 1507 | cant_combine_insn_p (rtx insn) |
c3410241 BS |
1508 | { |
1509 | rtx set; | |
1510 | rtx src, dest; | |
23190837 | 1511 | |
c3410241 BS |
1512 | /* If this isn't really an insn, we can't do anything. |
1513 | This can occur when flow deletes an insn that it has merged into an | |
1514 | auto-increment address. */ | |
1515 | if (! INSN_P (insn)) | |
1516 | return 1; | |
1517 | ||
7f0ea82e R |
1518 | /* Never combine loads and stores involving hard regs that are likely |
1519 | to be spilled. The register allocator can usually handle such | |
cafe096b | 1520 | reg-reg moves by tying. If we allow the combiner to make |
7f0ea82e | 1521 | substitutions of likely-spilled regs, we may abort in reload. |
c3410241 BS |
1522 | As an exception, we allow combinations involving fixed regs; these are |
1523 | not available to the register allocator so there's no risk involved. */ | |
1524 | ||
1525 | set = single_set (insn); | |
1526 | if (! set) | |
1527 | return 0; | |
1528 | src = SET_SRC (set); | |
1529 | dest = SET_DEST (set); | |
ad334b51 JH |
1530 | if (GET_CODE (src) == SUBREG) |
1531 | src = SUBREG_REG (src); | |
1532 | if (GET_CODE (dest) == SUBREG) | |
1533 | dest = SUBREG_REG (dest); | |
53895717 BS |
1534 | if (REG_P (src) && REG_P (dest) |
1535 | && ((REGNO (src) < FIRST_PSEUDO_REGISTER | |
7f0ea82e R |
1536 | && ! fixed_regs[REGNO (src)] |
1537 | && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src)))) | |
53895717 | 1538 | || (REGNO (dest) < FIRST_PSEUDO_REGISTER |
7f0ea82e R |
1539 | && ! fixed_regs[REGNO (dest)] |
1540 | && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest)))))) | |
c3410241 | 1541 | return 1; |
53895717 | 1542 | |
c3410241 BS |
1543 | return 0; |
1544 | } | |
1545 | ||
8c03ca00 EB |
1546 | /* Adjust INSN after we made a change to its destination. |
1547 | ||
1548 | Changing the destination can invalidate notes that say something about | |
1549 | the results of the insn and a LOG_LINK pointing to the insn. */ | |
1550 | ||
1551 | static void | |
1552 | adjust_for_new_dest (rtx insn) | |
1553 | { | |
1554 | rtx *loc; | |
1555 | ||
1556 | /* For notes, be conservative and simply remove them. */ | |
1557 | loc = ®_NOTES (insn); | |
1558 | while (*loc) | |
1559 | { | |
1560 | enum reg_note kind = REG_NOTE_KIND (*loc); | |
1561 | if (kind == REG_EQUAL || kind == REG_EQUIV) | |
1562 | *loc = XEXP (*loc, 1); | |
1563 | else | |
1564 | loc = &XEXP (*loc, 1); | |
1565 | } | |
1566 | ||
1567 | /* The new insn will have a destination that was previously the destination | |
1568 | of an insn just above it. Call distribute_links to make a LOG_LINK from | |
1569 | the next use of that destination. */ | |
1570 | distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX)); | |
1571 | } | |
1572 | ||
230d793d RS |
1573 | /* Try to combine the insns I1 and I2 into I3. |
1574 | Here I1 and I2 appear earlier than I3. | |
1575 | I1 can be zero; then we combine just I2 into I3. | |
663522cb | 1576 | |
04956a1a | 1577 | If we are combining three insns and the resulting insn is not recognized, |
230d793d RS |
1578 | try splitting it into two insns. If that happens, I2 and I3 are retained |
1579 | and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2 | |
1580 | are pseudo-deleted. | |
1581 | ||
663522cb | 1582 | Return 0 if the combination does not work. Then nothing is changed. |
abe6e52f | 1583 | If we did the combination, return the insn at which combine should |
663522cb KH |
1584 | resume scanning. |
1585 | ||
da7d8304 | 1586 | Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a |
44a76fc8 | 1587 | new direct jump instruction. */ |
230d793d RS |
1588 | |
1589 | static rtx | |
79a490a9 | 1590 | try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p) |
230d793d | 1591 | { |
02359929 | 1592 | /* New patterns for I3 and I2, respectively. */ |
230d793d | 1593 | rtx newpat, newi2pat = 0; |
cddd8b72 | 1594 | int substed_i2 = 0, substed_i1 = 0; |
230d793d RS |
1595 | /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */ |
1596 | int added_sets_1, added_sets_2; | |
1597 | /* Total number of SETs to put into I3. */ | |
1598 | int total_sets; | |
a1105617 | 1599 | /* Nonzero if I2's body now appears in I3. */ |
230d793d RS |
1600 | int i2_is_used; |
1601 | /* INSN_CODEs for new I3, new I2, and user of condition code. */ | |
6a651371 | 1602 | int insn_code_number, i2_code_number = 0, other_code_number = 0; |
230d793d RS |
1603 | /* Contains I3 if the destination of I3 is used in its source, which means |
1604 | that the old life of I3 is being killed. If that usage is placed into | |
1605 | I2 and not in I3, a REG_DEAD note must be made. */ | |
1606 | rtx i3dest_killed = 0; | |
1607 | /* SET_DEST and SET_SRC of I2 and I1. */ | |
1608 | rtx i2dest, i2src, i1dest = 0, i1src = 0; | |
1609 | /* PATTERN (I2), or a copy of it in certain cases. */ | |
1610 | rtx i2pat; | |
1611 | /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */ | |
c4e861e8 | 1612 | int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0; |
230d793d RS |
1613 | int i1_feeds_i3 = 0; |
1614 | /* Notes that must be added to REG_NOTES in I3 and I2. */ | |
1615 | rtx new_i3_notes, new_i2_notes; | |
176c9e6b JW |
1616 | /* Notes that we substituted I3 into I2 instead of the normal case. */ |
1617 | int i3_subst_into_i2 = 0; | |
df7d75de RK |
1618 | /* Notes that I1, I2 or I3 is a MULT operation. */ |
1619 | int have_mult = 0; | |
9e42ab3e | 1620 | int swap_i2i3 = 0; |
230d793d RS |
1621 | |
1622 | int maxreg; | |
1623 | rtx temp; | |
b3694847 | 1624 | rtx link; |
230d793d RS |
1625 | int i; |
1626 | ||
c3410241 BS |
1627 | /* Exit early if one of the insns involved can't be used for |
1628 | combinations. */ | |
1629 | if (cant_combine_insn_p (i3) | |
1630 | || cant_combine_insn_p (i2) | |
1631 | || (i1 && cant_combine_insn_p (i1)) | |
1632 | /* We also can't do anything if I3 has a | |
1633 | REG_LIBCALL note since we don't want to disrupt the contiguity of a | |
1634 | libcall. */ | |
ec35104c JL |
1635 | #if 0 |
1636 | /* ??? This gives worse code, and appears to be unnecessary, since no | |
1637 | pass after flow uses REG_LIBCALL/REG_RETVAL notes. */ | |
1638 | || find_reg_note (i3, REG_LIBCALL, NULL_RTX) | |
1639 | #endif | |
663522cb | 1640 | ) |
230d793d RS |
1641 | return 0; |
1642 | ||
1643 | combine_attempts++; | |
230d793d RS |
1644 | undobuf.other_insn = 0; |
1645 | ||
6e25d159 RK |
1646 | /* Reset the hard register usage information. */ |
1647 | CLEAR_HARD_REG_SET (newpat_used_regs); | |
1648 | ||
230d793d RS |
1649 | /* If I1 and I2 both feed I3, they can be in any order. To simplify the |
1650 | code below, set I1 to be the earlier of the two insns. */ | |
1651 | if (i1 && INSN_CUID (i1) > INSN_CUID (i2)) | |
1652 | temp = i1, i1 = i2, i2 = temp; | |
1653 | ||
abe6e52f | 1654 | added_links_insn = 0; |
137e889e | 1655 | |
230d793d | 1656 | /* First check for one important special-case that the code below will |
c7be4f66 | 1657 | not handle. Namely, the case where I1 is zero, I2 is a PARALLEL |
230d793d RS |
1658 | and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case, |
1659 | we may be able to replace that destination with the destination of I3. | |
1660 | This occurs in the common code where we compute both a quotient and | |
1661 | remainder into a structure, in which case we want to do the computation | |
1662 | directly into the structure to avoid register-register copies. | |
1663 | ||
c7be4f66 RK |
1664 | Note that this case handles both multiple sets in I2 and also |
1665 | cases where I2 has a number of CLOBBER or PARALLELs. | |
1666 | ||
230d793d RS |
1667 | We make very conservative checks below and only try to handle the |
1668 | most common cases of this. For example, we only handle the case | |
1669 | where I2 and I3 are adjacent to avoid making difficult register | |
1670 | usage tests. */ | |
1671 | ||
4b4bf941 | 1672 | if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET |
f8cfc6aa | 1673 | && REG_P (SET_SRC (PATTERN (i3))) |
230d793d | 1674 | && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER |
230d793d RS |
1675 | && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3))) |
1676 | && GET_CODE (PATTERN (i2)) == PARALLEL | |
1677 | && ! side_effects_p (SET_DEST (PATTERN (i3))) | |
5089e22e RS |
1678 | /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code |
1679 | below would need to check what is inside (and reg_overlap_mentioned_p | |
1680 | doesn't support those codes anyway). Don't allow those destinations; | |
1681 | the resulting insn isn't likely to be recognized anyway. */ | |
1682 | && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT | |
1683 | && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART | |
230d793d RS |
1684 | && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)), |
1685 | SET_DEST (PATTERN (i3))) | |
1686 | && next_real_insn (i2) == i3) | |
5089e22e RS |
1687 | { |
1688 | rtx p2 = PATTERN (i2); | |
1689 | ||
1690 | /* Make sure that the destination of I3, | |
1691 | which we are going to substitute into one output of I2, | |
1692 | is not used within another output of I2. We must avoid making this: | |
1693 | (parallel [(set (mem (reg 69)) ...) | |
1694 | (set (reg 69) ...)]) | |
1695 | which is not well-defined as to order of actions. | |
1696 | (Besides, reload can't handle output reloads for this.) | |
1697 | ||
1698 | The problem can also happen if the dest of I3 is a memory ref, | |
1699 | if another dest in I2 is an indirect memory ref. */ | |
1700 | for (i = 0; i < XVECLEN (p2, 0); i++) | |
7ca919b7 RK |
1701 | if ((GET_CODE (XVECEXP (p2, 0, i)) == SET |
1702 | || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER) | |
5089e22e RS |
1703 | && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)), |
1704 | SET_DEST (XVECEXP (p2, 0, i)))) | |
1705 | break; | |
230d793d | 1706 | |
5089e22e RS |
1707 | if (i == XVECLEN (p2, 0)) |
1708 | for (i = 0; i < XVECLEN (p2, 0); i++) | |
481c7efa FS |
1709 | if ((GET_CODE (XVECEXP (p2, 0, i)) == SET |
1710 | || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER) | |
1711 | && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3))) | |
5089e22e RS |
1712 | { |
1713 | combine_merges++; | |
230d793d | 1714 | |
5089e22e RS |
1715 | subst_insn = i3; |
1716 | subst_low_cuid = INSN_CUID (i2); | |
230d793d | 1717 | |
c4e861e8 | 1718 | added_sets_2 = added_sets_1 = 0; |
5089e22e | 1719 | i2dest = SET_SRC (PATTERN (i3)); |
230d793d | 1720 | |
5089e22e RS |
1721 | /* Replace the dest in I2 with our dest and make the resulting |
1722 | insn the new pattern for I3. Then skip to where we | |
1723 | validate the pattern. Everything was set up above. */ | |
663522cb | 1724 | SUBST (SET_DEST (XVECEXP (p2, 0, i)), |
5089e22e RS |
1725 | SET_DEST (PATTERN (i3))); |
1726 | ||
1727 | newpat = p2; | |
176c9e6b | 1728 | i3_subst_into_i2 = 1; |
5089e22e RS |
1729 | goto validate_replacement; |
1730 | } | |
1731 | } | |
230d793d | 1732 | |
667c1c2c RK |
1733 | /* If I2 is setting a double-word pseudo to a constant and I3 is setting |
1734 | one of those words to another constant, merge them by making a new | |
1735 | constant. */ | |
1736 | if (i1 == 0 | |
1737 | && (temp = single_set (i2)) != 0 | |
1738 | && (GET_CODE (SET_SRC (temp)) == CONST_INT | |
1739 | || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE) | |
f8cfc6aa | 1740 | && REG_P (SET_DEST (temp)) |
667c1c2c RK |
1741 | && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT |
1742 | && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD | |
1743 | && GET_CODE (PATTERN (i3)) == SET | |
1744 | && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG | |
1745 | && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp) | |
1746 | && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT | |
1747 | && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD | |
1748 | && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT) | |
1749 | { | |
1750 | HOST_WIDE_INT lo, hi; | |
1751 | ||
1752 | if (GET_CODE (SET_SRC (temp)) == CONST_INT) | |
1753 | lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0; | |
1754 | else | |
1755 | { | |
1756 | lo = CONST_DOUBLE_LOW (SET_SRC (temp)); | |
1757 | hi = CONST_DOUBLE_HIGH (SET_SRC (temp)); | |
1758 | } | |
1759 | ||
1760 | if (subreg_lowpart_p (SET_DEST (PATTERN (i3)))) | |
48b4d901 AO |
1761 | { |
1762 | /* We don't handle the case of the target word being wider | |
1763 | than a host wide int. */ | |
341c100f | 1764 | gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD); |
48b4d901 | 1765 | |
42a6ff51 | 1766 | lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1); |
73a39fc4 | 1767 | lo |= (INTVAL (SET_SRC (PATTERN (i3))) |
2ef1a7f9 | 1768 | & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1)); |
48b4d901 AO |
1769 | } |
1770 | else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD) | |
667c1c2c | 1771 | hi = INTVAL (SET_SRC (PATTERN (i3))); |
48b4d901 AO |
1772 | else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD) |
1773 | { | |
1774 | int sign = -(int) ((unsigned HOST_WIDE_INT) lo | |
1775 | >> (HOST_BITS_PER_WIDE_INT - 1)); | |
1776 | ||
42a6ff51 AO |
1777 | lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD |
1778 | (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1)); | |
1779 | lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD | |
1780 | (INTVAL (SET_SRC (PATTERN (i3))))); | |
48b4d901 AO |
1781 | if (hi == sign) |
1782 | hi = lo < 0 ? -1 : 0; | |
1783 | } | |
1784 | else | |
1785 | /* We don't handle the case of the higher word not fitting | |
1786 | entirely in either hi or lo. */ | |
341c100f | 1787 | gcc_unreachable (); |
667c1c2c RK |
1788 | |
1789 | combine_merges++; | |
1790 | subst_insn = i3; | |
1791 | subst_low_cuid = INSN_CUID (i2); | |
1792 | added_sets_2 = added_sets_1 = 0; | |
1793 | i2dest = SET_DEST (temp); | |
1794 | ||
1795 | SUBST (SET_SRC (temp), | |
1796 | immed_double_const (lo, hi, GET_MODE (SET_DEST (temp)))); | |
1797 | ||
1798 | newpat = PATTERN (i2); | |
667c1c2c RK |
1799 | goto validate_replacement; |
1800 | } | |
1801 | ||
230d793d RS |
1802 | #ifndef HAVE_cc0 |
1803 | /* If we have no I1 and I2 looks like: | |
1804 | (parallel [(set (reg:CC X) (compare:CC OP (const_int 0))) | |
1805 | (set Y OP)]) | |
1806 | make up a dummy I1 that is | |
1807 | (set Y OP) | |
1808 | and change I2 to be | |
1809 | (set (reg:CC X) (compare:CC Y (const_int 0))) | |
1810 | ||
1811 | (We can ignore any trailing CLOBBERs.) | |
1812 | ||
1813 | This undoes a previous combination and allows us to match a branch-and- | |
1814 | decrement insn. */ | |
1815 | ||
1816 | if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL | |
1817 | && XVECLEN (PATTERN (i2), 0) >= 2 | |
1818 | && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET | |
1819 | && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)))) | |
1820 | == MODE_CC) | |
1821 | && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE | |
1822 | && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx | |
1823 | && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET | |
f8cfc6aa | 1824 | && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) |
230d793d RS |
1825 | && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0), |
1826 | SET_SRC (XVECEXP (PATTERN (i2), 0, 1)))) | |
1827 | { | |
663522cb | 1828 | for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--) |
230d793d RS |
1829 | if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER) |
1830 | break; | |
1831 | ||
1832 | if (i == 1) | |
1833 | { | |
1834 | /* We make I1 with the same INSN_UID as I2. This gives it | |
1835 | the same INSN_CUID for value tracking. Our fake I1 will | |
1836 | never appear in the insn stream so giving it the same INSN_UID | |
1837 | as I2 will not cause a problem. */ | |
1838 | ||
4977bab6 | 1839 | i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2, |
0435312e | 1840 | BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2), |
4977bab6 ZW |
1841 | XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX, |
1842 | NULL_RTX); | |
230d793d RS |
1843 | |
1844 | SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0)); | |
1845 | SUBST (XEXP (SET_SRC (PATTERN (i2)), 0), | |
1846 | SET_DEST (PATTERN (i1))); | |
1847 | } | |
1848 | } | |
1849 | #endif | |
1850 | ||
1851 | /* Verify that I2 and I1 are valid for combining. */ | |
5f4f0e22 CH |
1852 | if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src) |
1853 | || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src))) | |
230d793d RS |
1854 | { |
1855 | undo_all (); | |
1856 | return 0; | |
1857 | } | |
1858 | ||
1859 | /* Record whether I2DEST is used in I2SRC and similarly for the other | |
1860 | cases. Knowing this will help in register status updating below. */ | |
1861 | i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src); | |
1862 | i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src); | |
1863 | i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src); | |
1864 | ||
916f14f1 | 1865 | /* See if I1 directly feeds into I3. It does if I1DEST is not used |
230d793d RS |
1866 | in I2SRC. */ |
1867 | i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src); | |
1868 | ||
1869 | /* Ensure that I3's pattern can be the destination of combines. */ | |
1870 | if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, | |
1871 | i1 && i2dest_in_i1src && i1_feeds_i3, | |
1872 | &i3dest_killed)) | |
1873 | { | |
1874 | undo_all (); | |
1875 | return 0; | |
1876 | } | |
1877 | ||
df7d75de RK |
1878 | /* See if any of the insns is a MULT operation. Unless one is, we will |
1879 | reject a combination that is, since it must be slower. Be conservative | |
1880 | here. */ | |
1881 | if (GET_CODE (i2src) == MULT | |
1882 | || (i1 != 0 && GET_CODE (i1src) == MULT) | |
1883 | || (GET_CODE (PATTERN (i3)) == SET | |
1884 | && GET_CODE (SET_SRC (PATTERN (i3))) == MULT)) | |
1885 | have_mult = 1; | |
1886 | ||
230d793d RS |
1887 | /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd. |
1888 | We used to do this EXCEPT in one case: I3 has a post-inc in an | |
1889 | output operand. However, that exception can give rise to insns like | |
23190837 | 1890 | mov r3,(r3)+ |
230d793d | 1891 | which is a famous insn on the PDP-11 where the value of r3 used as the |
5089e22e | 1892 | source was model-dependent. Avoid this sort of thing. */ |
230d793d RS |
1893 | |
1894 | #if 0 | |
1895 | if (!(GET_CODE (PATTERN (i3)) == SET | |
f8cfc6aa | 1896 | && REG_P (SET_SRC (PATTERN (i3))) |
3c0cb5de | 1897 | && MEM_P (SET_DEST (PATTERN (i3))) |
230d793d RS |
1898 | && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC |
1899 | || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC))) | |
1900 | /* It's not the exception. */ | |
1901 | #endif | |
1902 | #ifdef AUTO_INC_DEC | |
1903 | for (link = REG_NOTES (i3); link; link = XEXP (link, 1)) | |
1904 | if (REG_NOTE_KIND (link) == REG_INC | |
1905 | && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2)) | |
1906 | || (i1 != 0 | |
1907 | && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1))))) | |
1908 | { | |
1909 | undo_all (); | |
1910 | return 0; | |
1911 | } | |
1912 | #endif | |
1913 | ||
1914 | /* See if the SETs in I1 or I2 need to be kept around in the merged | |
1915 | instruction: whenever the value set there is still needed past I3. | |
1916 | For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3. | |
1917 | ||
1918 | For the SET in I1, we have two cases: If I1 and I2 independently | |
1919 | feed into I3, the set in I1 needs to be kept around if I1DEST dies | |
1920 | or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set | |
1921 | in I1 needs to be kept around unless I1DEST dies or is set in either | |
1922 | I2 or I3. We can distinguish these cases by seeing if I2SRC mentions | |
1923 | I1DEST. If so, we know I1 feeds into I2. */ | |
1924 | ||
1925 | added_sets_2 = ! dead_or_set_p (i3, i2dest); | |
1926 | ||
1927 | added_sets_1 | |
1928 | = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest) | |
1929 | : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest))); | |
1930 | ||
1931 | /* If the set in I2 needs to be kept around, we must make a copy of | |
1932 | PATTERN (I2), so that when we substitute I1SRC for I1DEST in | |
5089e22e | 1933 | PATTERN (I2), we are only substituting for the original I1DEST, not into |
230d793d RS |
1934 | an already-substituted copy. This also prevents making self-referential |
1935 | rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to | |
1936 | I2DEST. */ | |
1937 | ||
1938 | i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL | |
38a448ca | 1939 | ? gen_rtx_SET (VOIDmode, i2dest, i2src) |
230d793d RS |
1940 | : PATTERN (i2)); |
1941 | ||
1942 | if (added_sets_2) | |
1943 | i2pat = copy_rtx (i2pat); | |
1944 | ||
1945 | combine_merges++; | |
1946 | ||
1947 | /* Substitute in the latest insn for the regs set by the earlier ones. */ | |
1948 | ||
1949 | maxreg = max_reg_num (); | |
1950 | ||
1951 | subst_insn = i3; | |
230d793d RS |
1952 | |
1953 | /* It is possible that the source of I2 or I1 may be performing an | |
1954 | unneeded operation, such as a ZERO_EXTEND of something that is known | |
1955 | to have the high part zero. Handle that case by letting subst look at | |
1956 | the innermost one of them. | |
1957 | ||
1958 | Another way to do this would be to have a function that tries to | |
1959 | simplify a single insn instead of merging two or more insns. We don't | |
1960 | do this because of the potential of infinite loops and because | |
1961 | of the potential extra memory required. However, doing it the way | |
1962 | we are is a bit of a kludge and doesn't catch all cases. | |
1963 | ||
1964 | But only do this if -fexpensive-optimizations since it slows things down | |
1965 | and doesn't usually win. */ | |
1966 | ||
1967 | if (flag_expensive_optimizations) | |
1968 | { | |
9a5a17f3 | 1969 | /* Pass pc_rtx so no substitutions are done, just simplifications. */ |
230d793d | 1970 | if (i1) |
d0ab8cd3 RK |
1971 | { |
1972 | subst_low_cuid = INSN_CUID (i1); | |
1973 | i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0); | |
1974 | } | |
230d793d | 1975 | else |
d0ab8cd3 RK |
1976 | { |
1977 | subst_low_cuid = INSN_CUID (i2); | |
1978 | i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0); | |
1979 | } | |
230d793d RS |
1980 | } |
1981 | ||
1982 | #ifndef HAVE_cc0 | |
1983 | /* Many machines that don't use CC0 have insns that can both perform an | |
1984 | arithmetic operation and set the condition code. These operations will | |
1985 | be represented as a PARALLEL with the first element of the vector | |
1986 | being a COMPARE of an arithmetic operation with the constant zero. | |
1987 | The second element of the vector will set some pseudo to the result | |
1988 | of the same arithmetic operation. If we simplify the COMPARE, we won't | |
1989 | match such a pattern and so will generate an extra insn. Here we test | |
1990 | for this case, where both the comparison and the operation result are | |
1991 | needed, and make the PARALLEL by just replacing I2DEST in I3SRC with | |
1992 | I2SRC. Later we will make the PARALLEL that contains I2. */ | |
1993 | ||
1994 | if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET | |
1995 | && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE | |
1996 | && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx | |
1997 | && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest)) | |
1998 | { | |
94134f42 | 1999 | #ifdef SELECT_CC_MODE |
230d793d RS |
2000 | rtx *cc_use; |
2001 | enum machine_mode compare_mode; | |
081f5e7e | 2002 | #endif |
230d793d RS |
2003 | |
2004 | newpat = PATTERN (i3); | |
2005 | SUBST (XEXP (SET_SRC (newpat), 0), i2src); | |
2006 | ||
2007 | i2_is_used = 1; | |
2008 | ||
94134f42 | 2009 | #ifdef SELECT_CC_MODE |
230d793d RS |
2010 | /* See if a COMPARE with the operand we substituted in should be done |
2011 | with the mode that is currently being used. If not, do the same | |
2012 | processing we do in `subst' for a SET; namely, if the destination | |
2013 | is used only once, try to replace it with a register of the proper | |
2014 | mode and also replace the COMPARE. */ | |
2015 | if (undobuf.other_insn == 0 | |
2016 | && (cc_use = find_single_use (SET_DEST (newpat), i3, | |
2017 | &undobuf.other_insn)) | |
77fa0940 RK |
2018 | && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use), |
2019 | i2src, const0_rtx)) | |
230d793d RS |
2020 | != GET_MODE (SET_DEST (newpat)))) |
2021 | { | |
770ae6cc | 2022 | unsigned int regno = REGNO (SET_DEST (newpat)); |
38a448ca | 2023 | rtx new_dest = gen_rtx_REG (compare_mode, regno); |
230d793d RS |
2024 | |
2025 | if (regno < FIRST_PSEUDO_REGISTER | |
b1f21e0a | 2026 | || (REG_N_SETS (regno) == 1 && ! added_sets_2 |
230d793d RS |
2027 | && ! REG_USERVAR_P (SET_DEST (newpat)))) |
2028 | { | |
2029 | if (regno >= FIRST_PSEUDO_REGISTER) | |
2030 | SUBST (regno_reg_rtx[regno], new_dest); | |
2031 | ||
2032 | SUBST (SET_DEST (newpat), new_dest); | |
2033 | SUBST (XEXP (*cc_use, 0), new_dest); | |
2034 | SUBST (SET_SRC (newpat), | |
f1c6ba8b | 2035 | gen_rtx_COMPARE (compare_mode, i2src, const0_rtx)); |
230d793d RS |
2036 | } |
2037 | else | |
2038 | undobuf.other_insn = 0; | |
2039 | } | |
663522cb | 2040 | #endif |
230d793d RS |
2041 | } |
2042 | else | |
2043 | #endif | |
2044 | { | |
2045 | n_occurrences = 0; /* `subst' counts here */ | |
2046 | ||
2047 | /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we | |
2048 | need to make a unique copy of I2SRC each time we substitute it | |
2049 | to avoid self-referential rtl. */ | |
2050 | ||
d0ab8cd3 | 2051 | subst_low_cuid = INSN_CUID (i2); |
230d793d RS |
2052 | newpat = subst (PATTERN (i3), i2dest, i2src, 0, |
2053 | ! i1_feeds_i3 && i1dest_in_i1src); | |
cddd8b72 | 2054 | substed_i2 = 1; |
230d793d RS |
2055 | |
2056 | /* Record whether i2's body now appears within i3's body. */ | |
2057 | i2_is_used = n_occurrences; | |
2058 | } | |
2059 | ||
2060 | /* If we already got a failure, don't try to do more. Otherwise, | |
2061 | try to substitute in I1 if we have it. */ | |
2062 | ||
2063 | if (i1 && GET_CODE (newpat) != CLOBBER) | |
2064 | { | |
2065 | /* Before we can do this substitution, we must redo the test done | |
2066 | above (see detailed comments there) that ensures that I1DEST | |
0f41302f | 2067 | isn't mentioned in any SETs in NEWPAT that are field assignments. */ |
230d793d | 2068 | |
5f4f0e22 | 2069 | if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, |
cf0d9408 | 2070 | 0, (rtx*) 0)) |
230d793d RS |
2071 | { |
2072 | undo_all (); | |
2073 | return 0; | |
2074 | } | |
2075 | ||
2076 | n_occurrences = 0; | |
d0ab8cd3 | 2077 | subst_low_cuid = INSN_CUID (i1); |
230d793d | 2078 | newpat = subst (newpat, i1dest, i1src, 0, 0); |
cddd8b72 | 2079 | substed_i1 = 1; |
230d793d RS |
2080 | } |
2081 | ||
916f14f1 RK |
2082 | /* Fail if an autoincrement side-effect has been duplicated. Be careful |
2083 | to count all the ways that I2SRC and I1SRC can be used. */ | |
5f4f0e22 | 2084 | if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0 |
916f14f1 | 2085 | && i2_is_used + added_sets_2 > 1) |
5f4f0e22 | 2086 | || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0 |
916f14f1 RK |
2087 | && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3) |
2088 | > 1)) | |
230d793d RS |
2089 | /* Fail if we tried to make a new register (we used to abort, but there's |
2090 | really no reason to). */ | |
2091 | || max_reg_num () != maxreg | |
2092 | /* Fail if we couldn't do something and have a CLOBBER. */ | |
df7d75de RK |
2093 | || GET_CODE (newpat) == CLOBBER |
2094 | /* Fail if this new pattern is a MULT and we didn't have one before | |
2095 | at the outer level. */ | |
2096 | || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT | |
2097 | && ! have_mult)) | |
230d793d RS |
2098 | { |
2099 | undo_all (); | |
2100 | return 0; | |
2101 | } | |
2102 | ||
2103 | /* If the actions of the earlier insns must be kept | |
2104 | in addition to substituting them into the latest one, | |
2105 | we must make a new PARALLEL for the latest insn | |
2106 | to hold additional the SETs. */ | |
2107 | ||
2108 | if (added_sets_1 || added_sets_2) | |
2109 | { | |
2110 | combine_extras++; | |
2111 | ||
2112 | if (GET_CODE (newpat) == PARALLEL) | |
2113 | { | |
2114 | rtvec old = XVEC (newpat, 0); | |
2115 | total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2; | |
38a448ca | 2116 | newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets)); |
d38a30c9 KG |
2117 | memcpy (XVEC (newpat, 0)->elem, &old->elem[0], |
2118 | sizeof (old->elem[0]) * old->num_elem); | |
230d793d RS |
2119 | } |
2120 | else | |
2121 | { | |
2122 | rtx old = newpat; | |
2123 | total_sets = 1 + added_sets_1 + added_sets_2; | |
38a448ca | 2124 | newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets)); |
230d793d RS |
2125 | XVECEXP (newpat, 0, 0) = old; |
2126 | } | |
2127 | ||
cf0d9408 KH |
2128 | if (added_sets_1) |
2129 | XVECEXP (newpat, 0, --total_sets) | |
2130 | = (GET_CODE (PATTERN (i1)) == PARALLEL | |
2131 | ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1)); | |
2132 | ||
2133 | if (added_sets_2) | |
2134 | { | |
2135 | /* If there is no I1, use I2's body as is. We used to also not do | |
2136 | the subst call below if I2 was substituted into I3, | |
2137 | but that could lose a simplification. */ | |
2138 | if (i1 == 0) | |
2139 | XVECEXP (newpat, 0, --total_sets) = i2pat; | |
2140 | else | |
2141 | /* See comment where i2pat is assigned. */ | |
2142 | XVECEXP (newpat, 0, --total_sets) | |
2143 | = subst (i2pat, i1dest, i1src, 0, 0); | |
2144 | } | |
230d793d RS |
2145 | } |
2146 | ||
2147 | /* We come here when we are replacing a destination in I2 with the | |
2148 | destination of I3. */ | |
2149 | validate_replacement: | |
2150 | ||
6e25d159 RK |
2151 | /* Note which hard regs this insn has as inputs. */ |
2152 | mark_used_regs_combine (newpat); | |
2153 | ||
230d793d | 2154 | /* Is the result of combination a valid instruction? */ |
8e2f6e35 | 2155 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2156 | |
2157 | /* If the result isn't valid, see if it is a PARALLEL of two SETs where | |
8051c2eb AM |
2158 | the second SET's destination is a register that is unused and isn't |
2159 | marked as an instruction that might trap in an EH region. In that case, | |
230d793d RS |
2160 | we just need the first SET. This can occur when simplifying a divmod |
2161 | insn. We *must* test for this case here because the code below that | |
2162 | splits two independent SETs doesn't handle this case correctly when it | |
da6fdad3 | 2163 | updates the register status. |
230d793d | 2164 | |
da6fdad3 AM |
2165 | It's pointless doing this if we originally had two sets, one from |
2166 | i3, and one from i2. Combining then splitting the parallel results | |
2167 | in the original i2 again plus an invalid insn (which we delete). | |
2168 | The net effect is only to move instructions around, which makes | |
2169 | debug info less accurate. | |
2170 | ||
2171 | Also check the case where the first SET's destination is unused. | |
2172 | That would not cause incorrect code, but does cause an unneeded | |
2173 | insn to remain. */ | |
2174 | ||
2175 | if (insn_code_number < 0 | |
2176 | && !(added_sets_2 && i1 == 0) | |
2177 | && GET_CODE (newpat) == PARALLEL | |
230d793d RS |
2178 | && XVECLEN (newpat, 0) == 2 |
2179 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2180 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
230d793d RS |
2181 | && asm_noperands (newpat) < 0) |
2182 | { | |
5c881655 KH |
2183 | rtx set0 = XVECEXP (newpat, 0, 0); |
2184 | rtx set1 = XVECEXP (newpat, 0, 1); | |
8051c2eb AM |
2185 | rtx note; |
2186 | ||
f8cfc6aa | 2187 | if (((REG_P (SET_DEST (set1)) |
8051c2eb AM |
2188 | && find_reg_note (i3, REG_UNUSED, SET_DEST (set1))) |
2189 | || (GET_CODE (SET_DEST (set1)) == SUBREG | |
2190 | && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1))))) | |
2191 | && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX)) | |
2192 | || INTVAL (XEXP (note, 0)) <= 0) | |
2193 | && ! side_effects_p (SET_SRC (set1))) | |
2194 | { | |
2195 | newpat = set0; | |
2196 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); | |
2197 | } | |
2198 | ||
f8cfc6aa | 2199 | else if (((REG_P (SET_DEST (set0)) |
8051c2eb AM |
2200 | && find_reg_note (i3, REG_UNUSED, SET_DEST (set0))) |
2201 | || (GET_CODE (SET_DEST (set0)) == SUBREG | |
2202 | && find_reg_note (i3, REG_UNUSED, | |
2203 | SUBREG_REG (SET_DEST (set0))))) | |
2204 | && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX)) | |
2205 | || INTVAL (XEXP (note, 0)) <= 0) | |
2206 | && ! side_effects_p (SET_SRC (set0))) | |
2207 | { | |
2208 | newpat = set1; | |
2209 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); | |
2210 | ||
2211 | if (insn_code_number >= 0) | |
2212 | { | |
2213 | /* If we will be able to accept this, we have made a | |
2214 | change to the destination of I3. This requires us to | |
2215 | do a few adjustments. */ | |
2216 | ||
2217 | PATTERN (i3) = newpat; | |
2218 | adjust_for_new_dest (i3); | |
2219 | } | |
2220 | } | |
230d793d RS |
2221 | } |
2222 | ||
2223 | /* If we were combining three insns and the result is a simple SET | |
2224 | with no ASM_OPERANDS that wasn't recognized, try to split it into two | |
663522cb | 2225 | insns. There are two ways to do this. It can be split using a |
916f14f1 RK |
2226 | machine-specific method (like when you have an addition of a large |
2227 | constant) or by combine in the function find_split_point. */ | |
2228 | ||
230d793d RS |
2229 | if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET |
2230 | && asm_noperands (newpat) < 0) | |
2231 | { | |
916f14f1 | 2232 | rtx m_split, *split; |
42495ca0 | 2233 | rtx ni2dest = i2dest; |
916f14f1 RK |
2234 | |
2235 | /* See if the MD file can split NEWPAT. If it can't, see if letting it | |
42495ca0 RK |
2236 | use I2DEST as a scratch register will help. In the latter case, |
2237 | convert I2DEST to the mode of the source of NEWPAT if we can. */ | |
916f14f1 RK |
2238 | |
2239 | m_split = split_insns (newpat, i3); | |
a70c61d9 JW |
2240 | |
2241 | /* We can only use I2DEST as a scratch reg if it doesn't overlap any | |
2242 | inputs of NEWPAT. */ | |
2243 | ||
2244 | /* ??? If I2DEST is not safe, and I1DEST exists, then it would be | |
2245 | possible to try that as a scratch reg. This would require adding | |
2246 | more code to make it work though. */ | |
2247 | ||
2248 | if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat)) | |
42495ca0 RK |
2249 | { |
2250 | /* If I2DEST is a hard register or the only use of a pseudo, | |
2251 | we can change its mode. */ | |
2252 | if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest) | |
02f4ada4 | 2253 | && GET_MODE (SET_DEST (newpat)) != VOIDmode |
f8cfc6aa | 2254 | && REG_P (i2dest) |
42495ca0 | 2255 | && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER |
b1f21e0a | 2256 | || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2 |
42495ca0 | 2257 | && ! REG_USERVAR_P (i2dest)))) |
38a448ca | 2258 | ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)), |
c5c76735 JL |
2259 | REGNO (i2dest)); |
2260 | ||
2261 | m_split = split_insns (gen_rtx_PARALLEL | |
2262 | (VOIDmode, | |
2263 | gen_rtvec (2, newpat, | |
2264 | gen_rtx_CLOBBER (VOIDmode, | |
2265 | ni2dest))), | |
2266 | i3); | |
5dd3e650 R |
2267 | /* If the split with the mode-changed register didn't work, try |
2268 | the original register. */ | |
2269 | if (! m_split && ni2dest != i2dest) | |
c7ca5912 RK |
2270 | { |
2271 | ni2dest = i2dest; | |
2272 | m_split = split_insns (gen_rtx_PARALLEL | |
2273 | (VOIDmode, | |
2274 | gen_rtvec (2, newpat, | |
2275 | gen_rtx_CLOBBER (VOIDmode, | |
2276 | i2dest))), | |
2277 | i3); | |
2278 | } | |
42495ca0 | 2279 | } |
916f14f1 | 2280 | |
2f937369 | 2281 | if (m_split && NEXT_INSN (m_split) == NULL_RTX) |
d340408c | 2282 | { |
2f937369 | 2283 | m_split = PATTERN (m_split); |
d340408c RH |
2284 | insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes); |
2285 | if (insn_code_number >= 0) | |
2286 | newpat = m_split; | |
23190837 | 2287 | } |
2f937369 | 2288 | else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX |
d340408c | 2289 | && (next_real_insn (i2) == i3 |
2f937369 | 2290 | || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2)))) |
916f14f1 | 2291 | { |
1a26b032 | 2292 | rtx i2set, i3set; |
2f937369 DM |
2293 | rtx newi3pat = PATTERN (NEXT_INSN (m_split)); |
2294 | newi2pat = PATTERN (m_split); | |
916f14f1 | 2295 | |
2f937369 DM |
2296 | i3set = single_set (NEXT_INSN (m_split)); |
2297 | i2set = single_set (m_split); | |
1a26b032 | 2298 | |
42495ca0 RK |
2299 | /* In case we changed the mode of I2DEST, replace it in the |
2300 | pseudo-register table here. We can't do it above in case this | |
2301 | code doesn't get executed and we do a split the other way. */ | |
2302 | ||
2303 | if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER) | |
2304 | SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest); | |
2305 | ||
8e2f6e35 | 2306 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
1a26b032 RK |
2307 | |
2308 | /* If I2 or I3 has multiple SETs, we won't know how to track | |
9cc96794 RK |
2309 | register status, so don't use these insns. If I2's destination |
2310 | is used between I2 and I3, we also can't use these insns. */ | |
1a26b032 | 2311 | |
9cc96794 RK |
2312 | if (i2_code_number >= 0 && i2set && i3set |
2313 | && (next_real_insn (i2) == i3 | |
2314 | || ! reg_used_between_p (SET_DEST (i2set), i2, i3))) | |
8e2f6e35 BS |
2315 | insn_code_number = recog_for_combine (&newi3pat, i3, |
2316 | &new_i3_notes); | |
d0ab8cd3 RK |
2317 | if (insn_code_number >= 0) |
2318 | newpat = newi3pat; | |
2319 | ||
c767f54b | 2320 | /* It is possible that both insns now set the destination of I3. |
22609cbf | 2321 | If so, we must show an extra use of it. */ |
c767f54b | 2322 | |
393de53f RK |
2323 | if (insn_code_number >= 0) |
2324 | { | |
2325 | rtx new_i3_dest = SET_DEST (i3set); | |
2326 | rtx new_i2_dest = SET_DEST (i2set); | |
2327 | ||
2328 | while (GET_CODE (new_i3_dest) == ZERO_EXTRACT | |
2329 | || GET_CODE (new_i3_dest) == STRICT_LOW_PART | |
2330 | || GET_CODE (new_i3_dest) == SUBREG) | |
2331 | new_i3_dest = XEXP (new_i3_dest, 0); | |
2332 | ||
d4096689 RK |
2333 | while (GET_CODE (new_i2_dest) == ZERO_EXTRACT |
2334 | || GET_CODE (new_i2_dest) == STRICT_LOW_PART | |
2335 | || GET_CODE (new_i2_dest) == SUBREG) | |
2336 | new_i2_dest = XEXP (new_i2_dest, 0); | |
2337 | ||
f8cfc6aa JQ |
2338 | if (REG_P (new_i3_dest) |
2339 | && REG_P (new_i2_dest) | |
393de53f | 2340 | && REGNO (new_i3_dest) == REGNO (new_i2_dest)) |
b1f21e0a | 2341 | REG_N_SETS (REGNO (new_i2_dest))++; |
393de53f | 2342 | } |
916f14f1 | 2343 | } |
230d793d RS |
2344 | |
2345 | /* If we can split it and use I2DEST, go ahead and see if that | |
2346 | helps things be recognized. Verify that none of the registers | |
2347 | are set between I2 and I3. */ | |
d0ab8cd3 | 2348 | if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0 |
230d793d | 2349 | #ifdef HAVE_cc0 |
f8cfc6aa | 2350 | && REG_P (i2dest) |
230d793d RS |
2351 | #endif |
2352 | /* We need I2DEST in the proper mode. If it is a hard register | |
2353 | or the only use of a pseudo, we can change its mode. */ | |
2354 | && (GET_MODE (*split) == GET_MODE (i2dest) | |
2355 | || GET_MODE (*split) == VOIDmode | |
2356 | || REGNO (i2dest) < FIRST_PSEUDO_REGISTER | |
b1f21e0a | 2357 | || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2 |
230d793d RS |
2358 | && ! REG_USERVAR_P (i2dest))) |
2359 | && (next_real_insn (i2) == i3 | |
2360 | || ! use_crosses_set_p (*split, INSN_CUID (i2))) | |
2361 | /* We can't overwrite I2DEST if its value is still used by | |
2362 | NEWPAT. */ | |
2363 | && ! reg_referenced_p (i2dest, newpat)) | |
2364 | { | |
2365 | rtx newdest = i2dest; | |
df7d75de RK |
2366 | enum rtx_code split_code = GET_CODE (*split); |
2367 | enum machine_mode split_mode = GET_MODE (*split); | |
230d793d RS |
2368 | |
2369 | /* Get NEWDEST as a register in the proper mode. We have already | |
2370 | validated that we can do this. */ | |
df7d75de | 2371 | if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode) |
230d793d | 2372 | { |
38a448ca | 2373 | newdest = gen_rtx_REG (split_mode, REGNO (i2dest)); |
230d793d RS |
2374 | |
2375 | if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER) | |
2376 | SUBST (regno_reg_rtx[REGNO (i2dest)], newdest); | |
2377 | } | |
2378 | ||
2379 | /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to | |
2380 | an ASHIFT. This can occur if it was inside a PLUS and hence | |
2381 | appeared to be a memory address. This is a kludge. */ | |
df7d75de | 2382 | if (split_code == MULT |
230d793d | 2383 | && GET_CODE (XEXP (*split, 1)) == CONST_INT |
1568d79b | 2384 | && INTVAL (XEXP (*split, 1)) > 0 |
230d793d | 2385 | && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0) |
1dc8a823 | 2386 | { |
f1c6ba8b RK |
2387 | SUBST (*split, gen_rtx_ASHIFT (split_mode, |
2388 | XEXP (*split, 0), GEN_INT (i))); | |
1dc8a823 JW |
2389 | /* Update split_code because we may not have a multiply |
2390 | anymore. */ | |
2391 | split_code = GET_CODE (*split); | |
2392 | } | |
230d793d RS |
2393 | |
2394 | #ifdef INSN_SCHEDULING | |
2395 | /* If *SPLIT is a paradoxical SUBREG, when we split it, it should | |
2396 | be written as a ZERO_EXTEND. */ | |
3c0cb5de | 2397 | if (split_code == SUBREG && MEM_P (SUBREG_REG (*split))) |
25c25947 R |
2398 | { |
2399 | #ifdef LOAD_EXTEND_OP | |
2400 | /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's | |
2401 | what it really is. */ | |
2402 | if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split))) | |
2403 | == SIGN_EXTEND) | |
2404 | SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode, | |
2405 | SUBREG_REG (*split))); | |
2406 | else | |
2407 | #endif | |
2408 | SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode, | |
2409 | SUBREG_REG (*split))); | |
2410 | } | |
230d793d RS |
2411 | #endif |
2412 | ||
f1c6ba8b | 2413 | newi2pat = gen_rtx_SET (VOIDmode, newdest, *split); |
230d793d | 2414 | SUBST (*split, newdest); |
8e2f6e35 | 2415 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
df7d75de RK |
2416 | |
2417 | /* If the split point was a MULT and we didn't have one before, | |
2418 | don't use one now. */ | |
2419 | if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult)) | |
8e2f6e35 | 2420 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2421 | } |
2422 | } | |
2423 | ||
2424 | /* Check for a case where we loaded from memory in a narrow mode and | |
2425 | then sign extended it, but we need both registers. In that case, | |
2426 | we have a PARALLEL with both loads from the same memory location. | |
2427 | We can split this into a load from memory followed by a register-register | |
2428 | copy. This saves at least one insn, more if register allocation can | |
f0343c74 RK |
2429 | eliminate the copy. |
2430 | ||
a9b2f059 JW |
2431 | We cannot do this if the destination of the first assignment is a |
2432 | condition code register or cc0. We eliminate this case by making sure | |
2433 | the SET_DEST and SET_SRC have the same mode. | |
2434 | ||
f0343c74 RK |
2435 | We cannot do this if the destination of the second assignment is |
2436 | a register that we have already assumed is zero-extended. Similarly | |
2437 | for a SUBREG of such a register. */ | |
230d793d RS |
2438 | |
2439 | else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0 | |
2440 | && GET_CODE (newpat) == PARALLEL | |
2441 | && XVECLEN (newpat, 0) == 2 | |
2442 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2443 | && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND | |
a9b2f059 JW |
2444 | && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0))) |
2445 | == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0)))) | |
230d793d RS |
2446 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET |
2447 | && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2448 | XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0)) | |
2449 | && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2450 | INSN_CUID (i2)) | |
2451 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT | |
2452 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART | |
f0343c74 | 2453 | && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)), |
f8cfc6aa | 2454 | (REG_P (temp) |
5eaad481 | 2455 | && reg_stat[REGNO (temp)].nonzero_bits != 0 |
f0343c74 RK |
2456 | && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD |
2457 | && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT | |
5eaad481 | 2458 | && (reg_stat[REGNO (temp)].nonzero_bits |
f0343c74 RK |
2459 | != GET_MODE_MASK (word_mode)))) |
2460 | && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG | |
2461 | && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))), | |
f8cfc6aa | 2462 | (REG_P (temp) |
5eaad481 | 2463 | && reg_stat[REGNO (temp)].nonzero_bits != 0 |
f0343c74 RK |
2464 | && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD |
2465 | && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT | |
5eaad481 | 2466 | && (reg_stat[REGNO (temp)].nonzero_bits |
f0343c74 | 2467 | != GET_MODE_MASK (word_mode))))) |
230d793d RS |
2468 | && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)), |
2469 | SET_SRC (XVECEXP (newpat, 0, 1))) | |
2470 | && ! find_reg_note (i3, REG_UNUSED, | |
2471 | SET_DEST (XVECEXP (newpat, 0, 0)))) | |
2472 | { | |
472fbdd1 RK |
2473 | rtx ni2dest; |
2474 | ||
230d793d | 2475 | newi2pat = XVECEXP (newpat, 0, 0); |
472fbdd1 | 2476 | ni2dest = SET_DEST (XVECEXP (newpat, 0, 0)); |
230d793d RS |
2477 | newpat = XVECEXP (newpat, 0, 1); |
2478 | SUBST (SET_SRC (newpat), | |
4de249d9 | 2479 | gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest)); |
8e2f6e35 | 2480 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
a29ca9db | 2481 | |
230d793d | 2482 | if (i2_code_number >= 0) |
8e2f6e35 | 2483 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
5089e22e RS |
2484 | |
2485 | if (insn_code_number >= 0) | |
9e42ab3e | 2486 | swap_i2i3 = 1; |
230d793d | 2487 | } |
663522cb | 2488 | |
230d793d RS |
2489 | /* Similarly, check for a case where we have a PARALLEL of two independent |
2490 | SETs but we started with three insns. In this case, we can do the sets | |
2491 | as two separate insns. This case occurs when some SET allows two | |
2492 | other insns to combine, but the destination of that SET is still live. */ | |
2493 | ||
2494 | else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0 | |
2495 | && GET_CODE (newpat) == PARALLEL | |
2496 | && XVECLEN (newpat, 0) == 2 | |
2497 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2498 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT | |
2499 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART | |
2500 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
2501 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT | |
2502 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART | |
2503 | && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2504 | INSN_CUID (i2)) | |
2505 | /* Don't pass sets with (USE (MEM ...)) dests to the following. */ | |
2506 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE | |
2507 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE | |
2508 | && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)), | |
2509 | XVECEXP (newpat, 0, 0)) | |
2510 | && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)), | |
14a774a9 RK |
2511 | XVECEXP (newpat, 0, 1)) |
2512 | && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0))) | |
2513 | && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))) | |
230d793d | 2514 | { |
e9a25f70 JL |
2515 | /* Normally, it doesn't matter which of the two is done first, |
2516 | but it does if one references cc0. In that case, it has to | |
2517 | be first. */ | |
2518 | #ifdef HAVE_cc0 | |
2519 | if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))) | |
2520 | { | |
2521 | newi2pat = XVECEXP (newpat, 0, 0); | |
2522 | newpat = XVECEXP (newpat, 0, 1); | |
2523 | } | |
2524 | else | |
2525 | #endif | |
2526 | { | |
2527 | newi2pat = XVECEXP (newpat, 0, 1); | |
2528 | newpat = XVECEXP (newpat, 0, 0); | |
2529 | } | |
230d793d | 2530 | |
8e2f6e35 | 2531 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
a29ca9db | 2532 | |
230d793d | 2533 | if (i2_code_number >= 0) |
8e2f6e35 | 2534 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2535 | } |
2536 | ||
2537 | /* If it still isn't recognized, fail and change things back the way they | |
2538 | were. */ | |
2539 | if ((insn_code_number < 0 | |
2540 | /* Is the result a reasonable ASM_OPERANDS? */ | |
2541 | && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2))) | |
2542 | { | |
2543 | undo_all (); | |
2544 | return 0; | |
2545 | } | |
2546 | ||
2547 | /* If we had to change another insn, make sure it is valid also. */ | |
2548 | if (undobuf.other_insn) | |
2549 | { | |
230d793d RS |
2550 | rtx other_pat = PATTERN (undobuf.other_insn); |
2551 | rtx new_other_notes; | |
2552 | rtx note, next; | |
2553 | ||
6e25d159 RK |
2554 | CLEAR_HARD_REG_SET (newpat_used_regs); |
2555 | ||
8e2f6e35 BS |
2556 | other_code_number = recog_for_combine (&other_pat, undobuf.other_insn, |
2557 | &new_other_notes); | |
230d793d RS |
2558 | |
2559 | if (other_code_number < 0 && ! check_asm_operands (other_pat)) | |
2560 | { | |
2561 | undo_all (); | |
2562 | return 0; | |
2563 | } | |
2564 | ||
2565 | PATTERN (undobuf.other_insn) = other_pat; | |
2566 | ||
2567 | /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they | |
2568 | are still valid. Then add any non-duplicate notes added by | |
2569 | recog_for_combine. */ | |
2570 | for (note = REG_NOTES (undobuf.other_insn); note; note = next) | |
2571 | { | |
2572 | next = XEXP (note, 1); | |
2573 | ||
2574 | if (REG_NOTE_KIND (note) == REG_UNUSED | |
2575 | && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn))) | |
1a26b032 | 2576 | { |
f8cfc6aa | 2577 | if (REG_P (XEXP (note, 0))) |
b1f21e0a | 2578 | REG_N_DEATHS (REGNO (XEXP (note, 0)))--; |
1a26b032 RK |
2579 | |
2580 | remove_note (undobuf.other_insn, note); | |
2581 | } | |
230d793d RS |
2582 | } |
2583 | ||
1a26b032 | 2584 | for (note = new_other_notes; note; note = XEXP (note, 1)) |
f8cfc6aa | 2585 | if (REG_P (XEXP (note, 0))) |
b1f21e0a | 2586 | REG_N_DEATHS (REGNO (XEXP (note, 0)))++; |
1a26b032 | 2587 | |
230d793d | 2588 | distribute_notes (new_other_notes, undobuf.other_insn, |
72531479 | 2589 | undobuf.other_insn, NULL_RTX); |
230d793d | 2590 | } |
5ef17dd2 | 2591 | #ifdef HAVE_cc0 |
1f52178b | 2592 | /* If I2 is the CC0 setter and I3 is the CC0 user then check whether |
ec5c56db | 2593 | they are adjacent to each other or not. */ |
5ef17dd2 CC |
2594 | { |
2595 | rtx p = prev_nonnote_insn (i3); | |
4b4bf941 | 2596 | if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat |
663522cb | 2597 | && sets_cc0_p (newi2pat)) |
5ef17dd2 | 2598 | { |
663522cb KH |
2599 | undo_all (); |
2600 | return 0; | |
5ef17dd2 | 2601 | } |
663522cb KH |
2602 | } |
2603 | #endif | |
230d793d | 2604 | |
6fd21094 | 2605 | /* Only allow this combination if insn_rtx_costs reports that the |
64b8935d RS |
2606 | replacement instructions are cheaper than the originals. */ |
2607 | if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat)) | |
2608 | { | |
2609 | undo_all (); | |
2610 | return 0; | |
2611 | } | |
2612 | ||
663522cb | 2613 | /* We now know that we can do this combination. Merge the insns and |
230d793d RS |
2614 | update the status of registers and LOG_LINKS. */ |
2615 | ||
9e42ab3e RZ |
2616 | if (swap_i2i3) |
2617 | { | |
2618 | rtx insn; | |
2619 | rtx link; | |
2620 | rtx ni2dest; | |
2621 | ||
2622 | /* I3 now uses what used to be its destination and which is now | |
2623 | I2's destination. This requires us to do a few adjustments. */ | |
2624 | PATTERN (i3) = newpat; | |
2625 | adjust_for_new_dest (i3); | |
2626 | ||
2627 | /* We need a LOG_LINK from I3 to I2. But we used to have one, | |
2628 | so we still will. | |
2629 | ||
2630 | However, some later insn might be using I2's dest and have | |
2631 | a LOG_LINK pointing at I3. We must remove this link. | |
2632 | The simplest way to remove the link is to point it at I1, | |
2633 | which we know will be a NOTE. */ | |
2634 | ||
0b21d1dc UW |
2635 | /* newi2pat is usually a SET here; however, recog_for_combine might |
2636 | have added some clobbers. */ | |
2637 | if (GET_CODE (newi2pat) == PARALLEL) | |
2638 | ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0)); | |
2639 | else | |
2640 | ni2dest = SET_DEST (newi2pat); | |
2641 | ||
9e42ab3e RZ |
2642 | for (insn = NEXT_INSN (i3); |
2643 | insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR | |
2644 | || insn != BB_HEAD (this_basic_block->next_bb)); | |
2645 | insn = NEXT_INSN (insn)) | |
2646 | { | |
2647 | if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn))) | |
2648 | { | |
2649 | for (link = LOG_LINKS (insn); link; | |
2650 | link = XEXP (link, 1)) | |
2651 | if (XEXP (link, 0) == i3) | |
2652 | XEXP (link, 0) = i1; | |
2653 | ||
2654 | break; | |
2655 | } | |
2656 | } | |
2657 | } | |
2658 | ||
230d793d RS |
2659 | { |
2660 | rtx i3notes, i2notes, i1notes = 0; | |
2661 | rtx i3links, i2links, i1links = 0; | |
2662 | rtx midnotes = 0; | |
770ae6cc | 2663 | unsigned int regno; |
230d793d RS |
2664 | |
2665 | /* Get the old REG_NOTES and LOG_LINKS from all our insns and | |
2666 | clear them. */ | |
2667 | i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3); | |
2668 | i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2); | |
2669 | if (i1) | |
2670 | i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1); | |
2671 | ||
2672 | /* Ensure that we do not have something that should not be shared but | |
2673 | occurs multiple times in the new insns. Check this by first | |
5089e22e | 2674 | resetting all the `used' flags and then copying anything is shared. */ |
230d793d RS |
2675 | |
2676 | reset_used_flags (i3notes); | |
2677 | reset_used_flags (i2notes); | |
2678 | reset_used_flags (i1notes); | |
2679 | reset_used_flags (newpat); | |
2680 | reset_used_flags (newi2pat); | |
2681 | if (undobuf.other_insn) | |
2682 | reset_used_flags (PATTERN (undobuf.other_insn)); | |
2683 | ||
2684 | i3notes = copy_rtx_if_shared (i3notes); | |
2685 | i2notes = copy_rtx_if_shared (i2notes); | |
2686 | i1notes = copy_rtx_if_shared (i1notes); | |
2687 | newpat = copy_rtx_if_shared (newpat); | |
2688 | newi2pat = copy_rtx_if_shared (newi2pat); | |
2689 | if (undobuf.other_insn) | |
2690 | reset_used_flags (PATTERN (undobuf.other_insn)); | |
2691 | ||
2692 | INSN_CODE (i3) = insn_code_number; | |
2693 | PATTERN (i3) = newpat; | |
cddd8b72 | 2694 | |
4b4bf941 | 2695 | if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3)) |
cddd8b72 AO |
2696 | { |
2697 | rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3); | |
2698 | ||
2699 | reset_used_flags (call_usage); | |
2700 | call_usage = copy_rtx (call_usage); | |
2701 | ||
2702 | if (substed_i2) | |
2703 | replace_rtx (call_usage, i2dest, i2src); | |
2704 | ||
2705 | if (substed_i1) | |
2706 | replace_rtx (call_usage, i1dest, i1src); | |
2707 | ||
2708 | CALL_INSN_FUNCTION_USAGE (i3) = call_usage; | |
2709 | } | |
2710 | ||
230d793d RS |
2711 | if (undobuf.other_insn) |
2712 | INSN_CODE (undobuf.other_insn) = other_code_number; | |
2713 | ||
2714 | /* We had one special case above where I2 had more than one set and | |
2715 | we replaced a destination of one of those sets with the destination | |
2716 | of I3. In that case, we have to update LOG_LINKS of insns later | |
176c9e6b JW |
2717 | in this basic block. Note that this (expensive) case is rare. |
2718 | ||
2719 | Also, in this case, we must pretend that all REG_NOTEs for I2 | |
2720 | actually came from I3, so that REG_UNUSED notes from I2 will be | |
2721 | properly handled. */ | |
2722 | ||
c7be4f66 | 2723 | if (i3_subst_into_i2) |
176c9e6b | 2724 | { |
1786009e | 2725 | for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++) |
95ac07b0 | 2726 | if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE |
f8cfc6aa | 2727 | && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) |
1786009e ZW |
2728 | && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest |
2729 | && ! find_reg_note (i2, REG_UNUSED, | |
2730 | SET_DEST (XVECEXP (PATTERN (i2), 0, i)))) | |
2731 | for (temp = NEXT_INSN (i2); | |
f6366fc7 | 2732 | temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR |
a813c111 | 2733 | || BB_HEAD (this_basic_block) != temp); |
1786009e ZW |
2734 | temp = NEXT_INSN (temp)) |
2735 | if (temp != i3 && INSN_P (temp)) | |
2736 | for (link = LOG_LINKS (temp); link; link = XEXP (link, 1)) | |
2737 | if (XEXP (link, 0) == i2) | |
2738 | XEXP (link, 0) = i3; | |
176c9e6b JW |
2739 | |
2740 | if (i3notes) | |
2741 | { | |
2742 | rtx link = i3notes; | |
2743 | while (XEXP (link, 1)) | |
2744 | link = XEXP (link, 1); | |
2745 | XEXP (link, 1) = i2notes; | |
2746 | } | |
2747 | else | |
2748 | i3notes = i2notes; | |
2749 | i2notes = 0; | |
2750 | } | |
230d793d RS |
2751 | |
2752 | LOG_LINKS (i3) = 0; | |
2753 | REG_NOTES (i3) = 0; | |
2754 | LOG_LINKS (i2) = 0; | |
2755 | REG_NOTES (i2) = 0; | |
2756 | ||
2757 | if (newi2pat) | |
2758 | { | |
2759 | INSN_CODE (i2) = i2_code_number; | |
2760 | PATTERN (i2) = newi2pat; | |
2761 | } | |
2762 | else | |
6773e15f | 2763 | SET_INSN_DELETED (i2); |
230d793d RS |
2764 | |
2765 | if (i1) | |
2766 | { | |
2767 | LOG_LINKS (i1) = 0; | |
2768 | REG_NOTES (i1) = 0; | |
6773e15f | 2769 | SET_INSN_DELETED (i1); |
230d793d RS |
2770 | } |
2771 | ||
2772 | /* Get death notes for everything that is now used in either I3 or | |
663522cb | 2773 | I2 and used to die in a previous insn. If we built two new |
6eb12cef RK |
2774 | patterns, move from I1 to I2 then I2 to I3 so that we get the |
2775 | proper movement on registers that I2 modifies. */ | |
230d793d | 2776 | |
230d793d | 2777 | if (newi2pat) |
6eb12cef RK |
2778 | { |
2779 | move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes); | |
2780 | move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes); | |
2781 | } | |
2782 | else | |
2783 | move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2), | |
2784 | i3, &midnotes); | |
230d793d RS |
2785 | |
2786 | /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */ | |
2787 | if (i3notes) | |
72531479 | 2788 | distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX); |
230d793d | 2789 | if (i2notes) |
72531479 | 2790 | distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX); |
230d793d | 2791 | if (i1notes) |
72531479 | 2792 | distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX); |
230d793d | 2793 | if (midnotes) |
72531479 | 2794 | distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX); |
230d793d RS |
2795 | |
2796 | /* Distribute any notes added to I2 or I3 by recog_for_combine. We | |
2797 | know these are REG_UNUSED and want them to go to the desired insn, | |
663522cb | 2798 | so we always pass it as i3. We have not counted the notes in |
1a26b032 RK |
2799 | reg_n_deaths yet, so we need to do so now. */ |
2800 | ||
230d793d | 2801 | if (newi2pat && new_i2_notes) |
1a26b032 RK |
2802 | { |
2803 | for (temp = new_i2_notes; temp; temp = XEXP (temp, 1)) | |
f8cfc6aa | 2804 | if (REG_P (XEXP (temp, 0))) |
b1f21e0a | 2805 | REG_N_DEATHS (REGNO (XEXP (temp, 0)))++; |
663522cb | 2806 | |
72531479 | 2807 | distribute_notes (new_i2_notes, i2, i2, NULL_RTX); |
1a26b032 RK |
2808 | } |
2809 | ||
230d793d | 2810 | if (new_i3_notes) |
1a26b032 RK |
2811 | { |
2812 | for (temp = new_i3_notes; temp; temp = XEXP (temp, 1)) | |
f8cfc6aa | 2813 | if (REG_P (XEXP (temp, 0))) |
b1f21e0a | 2814 | REG_N_DEATHS (REGNO (XEXP (temp, 0)))++; |
663522cb | 2815 | |
72531479 | 2816 | distribute_notes (new_i3_notes, i3, i3, NULL_RTX); |
1a26b032 | 2817 | } |
230d793d RS |
2818 | |
2819 | /* If I3DEST was used in I3SRC, it really died in I3. We may need to | |
e9a25f70 JL |
2820 | put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets |
2821 | I3DEST, the death must be somewhere before I2, not I3. If we passed I3 | |
2822 | in that case, it might delete I2. Similarly for I2 and I1. | |
1a26b032 RK |
2823 | Show an additional death due to the REG_DEAD note we make here. If |
2824 | we discard it in distribute_notes, we will decrement it again. */ | |
d0ab8cd3 | 2825 | |
230d793d | 2826 | if (i3dest_killed) |
1a26b032 | 2827 | { |
f8cfc6aa | 2828 | if (REG_P (i3dest_killed)) |
b1f21e0a | 2829 | REG_N_DEATHS (REGNO (i3dest_killed))++; |
1a26b032 | 2830 | |
e9a25f70 | 2831 | if (newi2pat && reg_set_p (i3dest_killed, newi2pat)) |
38a448ca RH |
2832 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed, |
2833 | NULL_RTX), | |
72531479 | 2834 | NULL_RTX, i2, NULL_RTX); |
e9a25f70 | 2835 | else |
38a448ca RH |
2836 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed, |
2837 | NULL_RTX), | |
72531479 | 2838 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX); |
1a26b032 | 2839 | } |
58c8c593 | 2840 | |
230d793d | 2841 | if (i2dest_in_i2src) |
58c8c593 | 2842 | { |
f8cfc6aa | 2843 | if (REG_P (i2dest)) |
b1f21e0a | 2844 | REG_N_DEATHS (REGNO (i2dest))++; |
1a26b032 | 2845 | |
58c8c593 | 2846 | if (newi2pat && reg_set_p (i2dest, newi2pat)) |
38a448ca | 2847 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX), |
72531479 | 2848 | NULL_RTX, i2, NULL_RTX); |
58c8c593 | 2849 | else |
38a448ca | 2850 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX), |
72531479 | 2851 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX); |
58c8c593 RK |
2852 | } |
2853 | ||
230d793d | 2854 | if (i1dest_in_i1src) |
58c8c593 | 2855 | { |
f8cfc6aa | 2856 | if (REG_P (i1dest)) |
b1f21e0a | 2857 | REG_N_DEATHS (REGNO (i1dest))++; |
1a26b032 | 2858 | |
58c8c593 | 2859 | if (newi2pat && reg_set_p (i1dest, newi2pat)) |
38a448ca | 2860 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX), |
72531479 | 2861 | NULL_RTX, i2, NULL_RTX); |
58c8c593 | 2862 | else |
38a448ca | 2863 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX), |
72531479 | 2864 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX); |
58c8c593 | 2865 | } |
230d793d RS |
2866 | |
2867 | distribute_links (i3links); | |
2868 | distribute_links (i2links); | |
2869 | distribute_links (i1links); | |
2870 | ||
f8cfc6aa | 2871 | if (REG_P (i2dest)) |
230d793d | 2872 | { |
d0ab8cd3 RK |
2873 | rtx link; |
2874 | rtx i2_insn = 0, i2_val = 0, set; | |
2875 | ||
2876 | /* The insn that used to set this register doesn't exist, and | |
2877 | this life of the register may not exist either. See if one of | |
663522cb | 2878 | I3's links points to an insn that sets I2DEST. If it does, |
d0ab8cd3 RK |
2879 | that is now the last known value for I2DEST. If we don't update |
2880 | this and I2 set the register to a value that depended on its old | |
230d793d RS |
2881 | contents, we will get confused. If this insn is used, thing |
2882 | will be set correctly in combine_instructions. */ | |
d0ab8cd3 RK |
2883 | |
2884 | for (link = LOG_LINKS (i3); link; link = XEXP (link, 1)) | |
2885 | if ((set = single_set (XEXP (link, 0))) != 0 | |
2886 | && rtx_equal_p (i2dest, SET_DEST (set))) | |
2887 | i2_insn = XEXP (link, 0), i2_val = SET_SRC (set); | |
2888 | ||
2889 | record_value_for_reg (i2dest, i2_insn, i2_val); | |
230d793d RS |
2890 | |
2891 | /* If the reg formerly set in I2 died only once and that was in I3, | |
2892 | zero its use count so it won't make `reload' do any work. */ | |
538fe8cd ILT |
2893 | if (! added_sets_2 |
2894 | && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat)) | |
2895 | && ! i2dest_in_i2src) | |
230d793d RS |
2896 | { |
2897 | regno = REGNO (i2dest); | |
b1f21e0a | 2898 | REG_N_SETS (regno)--; |
230d793d RS |
2899 | } |
2900 | } | |
2901 | ||
f8cfc6aa | 2902 | if (i1 && REG_P (i1dest)) |
230d793d | 2903 | { |
d0ab8cd3 RK |
2904 | rtx link; |
2905 | rtx i1_insn = 0, i1_val = 0, set; | |
2906 | ||
2907 | for (link = LOG_LINKS (i3); link; link = XEXP (link, 1)) | |
2908 | if ((set = single_set (XEXP (link, 0))) != 0 | |
2909 | && rtx_equal_p (i1dest, SET_DEST (set))) | |
2910 | i1_insn = XEXP (link, 0), i1_val = SET_SRC (set); | |
2911 | ||
2912 | record_value_for_reg (i1dest, i1_insn, i1_val); | |
2913 | ||
230d793d | 2914 | regno = REGNO (i1dest); |
5af91171 | 2915 | if (! added_sets_1 && ! i1dest_in_i1src) |
770ae6cc | 2916 | REG_N_SETS (regno)--; |
230d793d RS |
2917 | } |
2918 | ||
5eaad481 PB |
2919 | /* Update reg_stat[].nonzero_bits et al for any changes that may have |
2920 | been made to this insn. The order of | |
2921 | set_nonzero_bits_and_sign_copies() is important. Because newi2pat | |
2922 | can affect nonzero_bits of newpat */ | |
22609cbf | 2923 | if (newi2pat) |
84832317 | 2924 | note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL); |
5fb7c247 | 2925 | note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL); |
22609cbf | 2926 | |
44a76fc8 AG |
2927 | /* Set new_direct_jump_p if a new return or simple jump instruction |
2928 | has been created. | |
2929 | ||
663522cb | 2930 | If I3 is now an unconditional jump, ensure that it has a |
230d793d | 2931 | BARRIER following it since it may have initially been a |
381ee8af | 2932 | conditional jump. It may also be the last nonnote insn. */ |
663522cb | 2933 | |
f40f4c8e | 2934 | if (returnjump_p (i3) || any_uncondjump_p (i3)) |
44a76fc8 AG |
2935 | { |
2936 | *new_direct_jump_p = 1; | |
9143c6b7 | 2937 | mark_jump_label (PATTERN (i3), i3, 0); |
230d793d | 2938 | |
44a76fc8 | 2939 | if ((temp = next_nonnote_insn (i3)) == NULL_RTX |
4b4bf941 | 2940 | || !BARRIER_P (temp)) |
44a76fc8 AG |
2941 | emit_barrier_after (i3); |
2942 | } | |
f40f4c8e RS |
2943 | |
2944 | if (undobuf.other_insn != NULL_RTX | |
2945 | && (returnjump_p (undobuf.other_insn) | |
2946 | || any_uncondjump_p (undobuf.other_insn))) | |
2947 | { | |
2948 | *new_direct_jump_p = 1; | |
2949 | ||
2950 | if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX | |
4b4bf941 | 2951 | || !BARRIER_P (temp)) |
f40f4c8e RS |
2952 | emit_barrier_after (undobuf.other_insn); |
2953 | } | |
73a39fc4 | 2954 | |
592a6d1d JH |
2955 | /* An NOOP jump does not need barrier, but it does need cleaning up |
2956 | of CFG. */ | |
2957 | if (GET_CODE (newpat) == SET | |
2958 | && SET_SRC (newpat) == pc_rtx | |
2959 | && SET_DEST (newpat) == pc_rtx) | |
2960 | *new_direct_jump_p = 1; | |
230d793d RS |
2961 | } |
2962 | ||
2963 | combine_successes++; | |
e7749837 | 2964 | undo_commit (); |
230d793d | 2965 | |
abe6e52f RK |
2966 | if (added_links_insn |
2967 | && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2)) | |
2968 | && INSN_CUID (added_links_insn) < INSN_CUID (i3)) | |
2969 | return added_links_insn; | |
2970 | else | |
2971 | return newi2pat ? i2 : i3; | |
230d793d RS |
2972 | } |
2973 | \f | |
2974 | /* Undo all the modifications recorded in undobuf. */ | |
2975 | ||
2976 | static void | |
79a490a9 | 2977 | undo_all (void) |
230d793d | 2978 | { |
241cea85 RK |
2979 | struct undo *undo, *next; |
2980 | ||
2981 | for (undo = undobuf.undos; undo; undo = next) | |
7c046e4e | 2982 | { |
241cea85 RK |
2983 | next = undo->next; |
2984 | if (undo->is_int) | |
2985 | *undo->where.i = undo->old_contents.i; | |
7c046e4e | 2986 | else |
241cea85 RK |
2987 | *undo->where.r = undo->old_contents.r; |
2988 | ||
2989 | undo->next = undobuf.frees; | |
2990 | undobuf.frees = undo; | |
7c046e4e | 2991 | } |
230d793d | 2992 | |
f1c6ba8b | 2993 | undobuf.undos = 0; |
230d793d | 2994 | } |
e7749837 RH |
2995 | |
2996 | /* We've committed to accepting the changes we made. Move all | |
2997 | of the undos to the free list. */ | |
2998 | ||
2999 | static void | |
79a490a9 | 3000 | undo_commit (void) |
e7749837 RH |
3001 | { |
3002 | struct undo *undo, *next; | |
3003 | ||
3004 | for (undo = undobuf.undos; undo; undo = next) | |
3005 | { | |
3006 | next = undo->next; | |
3007 | undo->next = undobuf.frees; | |
3008 | undobuf.frees = undo; | |
3009 | } | |
f1c6ba8b | 3010 | undobuf.undos = 0; |
e7749837 RH |
3011 | } |
3012 | ||
230d793d RS |
3013 | \f |
3014 | /* Find the innermost point within the rtx at LOC, possibly LOC itself, | |
d0ab8cd3 RK |
3015 | where we have an arithmetic expression and return that point. LOC will |
3016 | be inside INSN. | |
230d793d RS |
3017 | |
3018 | try_combine will call this function to see if an insn can be split into | |
3019 | two insns. */ | |
3020 | ||
3021 | static rtx * | |
79a490a9 | 3022 | find_split_point (rtx *loc, rtx insn) |
230d793d RS |
3023 | { |
3024 | rtx x = *loc; | |
3025 | enum rtx_code code = GET_CODE (x); | |
3026 | rtx *split; | |
770ae6cc RK |
3027 | unsigned HOST_WIDE_INT len = 0; |
3028 | HOST_WIDE_INT pos = 0; | |
3029 | int unsignedp = 0; | |
6a651371 | 3030 | rtx inner = NULL_RTX; |
230d793d RS |
3031 | |
3032 | /* First special-case some codes. */ | |
3033 | switch (code) | |
3034 | { | |
3035 | case SUBREG: | |
3036 | #ifdef INSN_SCHEDULING | |
3037 | /* If we are making a paradoxical SUBREG invalid, it becomes a split | |
3038 | point. */ | |
3c0cb5de | 3039 | if (MEM_P (SUBREG_REG (x))) |
230d793d RS |
3040 | return loc; |
3041 | #endif | |
d0ab8cd3 | 3042 | return find_split_point (&SUBREG_REG (x), insn); |
230d793d | 3043 | |
230d793d | 3044 | case MEM: |
916f14f1 | 3045 | #ifdef HAVE_lo_sum |
230d793d RS |
3046 | /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it |
3047 | using LO_SUM and HIGH. */ | |
3048 | if (GET_CODE (XEXP (x, 0)) == CONST | |
3049 | || GET_CODE (XEXP (x, 0)) == SYMBOL_REF) | |
3050 | { | |
3051 | SUBST (XEXP (x, 0), | |
f1c6ba8b RK |
3052 | gen_rtx_LO_SUM (Pmode, |
3053 | gen_rtx_HIGH (Pmode, XEXP (x, 0)), | |
3054 | XEXP (x, 0))); | |
230d793d RS |
3055 | return &XEXP (XEXP (x, 0), 0); |
3056 | } | |
230d793d RS |
3057 | #endif |
3058 | ||
916f14f1 RK |
3059 | /* If we have a PLUS whose second operand is a constant and the |
3060 | address is not valid, perhaps will can split it up using | |
3061 | the machine-specific way to split large constants. We use | |
ddd5a7c1 | 3062 | the first pseudo-reg (one of the virtual regs) as a placeholder; |
916f14f1 RK |
3063 | it will not remain in the result. */ |
3064 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
3065 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3066 | && ! memory_address_p (GET_MODE (x), XEXP (x, 0))) | |
3067 | { | |
3068 | rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER]; | |
38a448ca | 3069 | rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)), |
916f14f1 RK |
3070 | subst_insn); |
3071 | ||
3072 | /* This should have produced two insns, each of which sets our | |
3073 | placeholder. If the source of the second is a valid address, | |
3074 | we can make put both sources together and make a split point | |
3075 | in the middle. */ | |
3076 | ||
2f937369 DM |
3077 | if (seq |
3078 | && NEXT_INSN (seq) != NULL_RTX | |
3079 | && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX | |
4b4bf941 | 3080 | && NONJUMP_INSN_P (seq) |
2f937369 DM |
3081 | && GET_CODE (PATTERN (seq)) == SET |
3082 | && SET_DEST (PATTERN (seq)) == reg | |
916f14f1 | 3083 | && ! reg_mentioned_p (reg, |
2f937369 | 3084 | SET_SRC (PATTERN (seq))) |
4b4bf941 | 3085 | && NONJUMP_INSN_P (NEXT_INSN (seq)) |
2f937369 DM |
3086 | && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET |
3087 | && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg | |
916f14f1 | 3088 | && memory_address_p (GET_MODE (x), |
2f937369 | 3089 | SET_SRC (PATTERN (NEXT_INSN (seq))))) |
916f14f1 | 3090 | { |
2f937369 DM |
3091 | rtx src1 = SET_SRC (PATTERN (seq)); |
3092 | rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq))); | |
916f14f1 RK |
3093 | |
3094 | /* Replace the placeholder in SRC2 with SRC1. If we can | |
3095 | find where in SRC2 it was placed, that can become our | |
3096 | split point and we can replace this address with SRC2. | |
3097 | Just try two obvious places. */ | |
3098 | ||
3099 | src2 = replace_rtx (src2, reg, src1); | |
3100 | split = 0; | |
3101 | if (XEXP (src2, 0) == src1) | |
3102 | split = &XEXP (src2, 0); | |
3103 | else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e' | |
3104 | && XEXP (XEXP (src2, 0), 0) == src1) | |
3105 | split = &XEXP (XEXP (src2, 0), 0); | |
3106 | ||
3107 | if (split) | |
3108 | { | |
3109 | SUBST (XEXP (x, 0), src2); | |
3110 | return split; | |
3111 | } | |
3112 | } | |
663522cb | 3113 | |
1a26b032 RK |
3114 | /* If that didn't work, perhaps the first operand is complex and |
3115 | needs to be computed separately, so make a split point there. | |
3116 | This will occur on machines that just support REG + CONST | |
3117 | and have a constant moved through some previous computation. */ | |
3118 | ||
ec8e098d | 3119 | else if (!OBJECT_P (XEXP (XEXP (x, 0), 0)) |
1a26b032 | 3120 | && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG |
ec8e098d | 3121 | && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0))))) |
1a26b032 | 3122 | return &XEXP (XEXP (x, 0), 0); |
916f14f1 RK |
3123 | } |
3124 | break; | |
3125 | ||
230d793d RS |
3126 | case SET: |
3127 | #ifdef HAVE_cc0 | |
3128 | /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a | |
3129 | ZERO_EXTRACT, the most likely reason why this doesn't match is that | |
3130 | we need to put the operand into a register. So split at that | |
3131 | point. */ | |
3132 | ||
3133 | if (SET_DEST (x) == cc0_rtx | |
3134 | && GET_CODE (SET_SRC (x)) != COMPARE | |
3135 | && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT | |
ec8e098d | 3136 | && !OBJECT_P (SET_SRC (x)) |
230d793d | 3137 | && ! (GET_CODE (SET_SRC (x)) == SUBREG |
ec8e098d | 3138 | && OBJECT_P (SUBREG_REG (SET_SRC (x))))) |
230d793d RS |
3139 | return &SET_SRC (x); |
3140 | #endif | |
3141 | ||
3142 | /* See if we can split SET_SRC as it stands. */ | |
d0ab8cd3 | 3143 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3144 | if (split && split != &SET_SRC (x)) |
3145 | return split; | |
3146 | ||
041d7180 JL |
3147 | /* See if we can split SET_DEST as it stands. */ |
3148 | split = find_split_point (&SET_DEST (x), insn); | |
3149 | if (split && split != &SET_DEST (x)) | |
3150 | return split; | |
3151 | ||
230d793d RS |
3152 | /* See if this is a bitfield assignment with everything constant. If |
3153 | so, this is an IOR of an AND, so split it into that. */ | |
3154 | if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | |
3155 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))) | |
5f4f0e22 | 3156 | <= HOST_BITS_PER_WIDE_INT) |
230d793d RS |
3157 | && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT |
3158 | && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT | |
3159 | && GET_CODE (SET_SRC (x)) == CONST_INT | |
3160 | && ((INTVAL (XEXP (SET_DEST (x), 1)) | |
cf0d9408 | 3161 | + INTVAL (XEXP (SET_DEST (x), 2))) |
230d793d RS |
3162 | <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))) |
3163 | && ! side_effects_p (XEXP (SET_DEST (x), 0))) | |
3164 | { | |
770ae6cc RK |
3165 | HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2)); |
3166 | unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1)); | |
3167 | unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x)); | |
230d793d RS |
3168 | rtx dest = XEXP (SET_DEST (x), 0); |
3169 | enum machine_mode mode = GET_MODE (dest); | |
5f4f0e22 | 3170 | unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1; |
230d793d | 3171 | |
f76b9db2 ILT |
3172 | if (BITS_BIG_ENDIAN) |
3173 | pos = GET_MODE_BITSIZE (mode) - len - pos; | |
230d793d | 3174 | |
770ae6cc | 3175 | if (src == mask) |
230d793d | 3176 | SUBST (SET_SRC (x), |
1999435c | 3177 | gen_binary (IOR, mode, dest, GEN_INT (src << pos))); |
230d793d | 3178 | else |
1999435c PB |
3179 | SUBST (SET_SRC (x), |
3180 | gen_binary (IOR, mode, | |
3181 | gen_binary (AND, mode, dest, | |
3182 | gen_int_mode (~(mask << pos), | |
3183 | mode)), | |
3184 | GEN_INT (src << pos))); | |
230d793d RS |
3185 | |
3186 | SUBST (SET_DEST (x), dest); | |
3187 | ||
d0ab8cd3 | 3188 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3189 | if (split && split != &SET_SRC (x)) |
3190 | return split; | |
3191 | } | |
3192 | ||
3193 | /* Otherwise, see if this is an operation that we can split into two. | |
3194 | If so, try to split that. */ | |
3195 | code = GET_CODE (SET_SRC (x)); | |
3196 | ||
3197 | switch (code) | |
3198 | { | |
d0ab8cd3 RK |
3199 | case AND: |
3200 | /* If we are AND'ing with a large constant that is only a single | |
3201 | bit and the result is only being used in a context where we | |
da7d8304 | 3202 | need to know if it is zero or nonzero, replace it with a bit |
d0ab8cd3 RK |
3203 | extraction. This will avoid the large constant, which might |
3204 | have taken more than one insn to make. If the constant were | |
3205 | not a valid argument to the AND but took only one insn to make, | |
3206 | this is no worse, but if it took more than one insn, it will | |
3207 | be better. */ | |
3208 | ||
3209 | if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | |
f8cfc6aa | 3210 | && REG_P (XEXP (SET_SRC (x), 0)) |
d0ab8cd3 | 3211 | && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7 |
f8cfc6aa | 3212 | && REG_P (SET_DEST (x)) |
cf0d9408 | 3213 | && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0 |
d0ab8cd3 RK |
3214 | && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE) |
3215 | && XEXP (*split, 0) == SET_DEST (x) | |
3216 | && XEXP (*split, 1) == const0_rtx) | |
3217 | { | |
76184def DE |
3218 | rtx extraction = make_extraction (GET_MODE (SET_DEST (x)), |
3219 | XEXP (SET_SRC (x), 0), | |
3220 | pos, NULL_RTX, 1, 1, 0, 0); | |
3221 | if (extraction != 0) | |
3222 | { | |
3223 | SUBST (SET_SRC (x), extraction); | |
3224 | return find_split_point (loc, insn); | |
3225 | } | |
d0ab8cd3 RK |
3226 | } |
3227 | break; | |
3228 | ||
1a6ec070 | 3229 | case NE: |
938d968e | 3230 | /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X |
ec5c56db | 3231 | is known to be on, this can be converted into a NEG of a shift. */ |
1a6ec070 RK |
3232 | if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx |
3233 | && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0)) | |
4eb2cb10 | 3234 | && 1 <= (pos = exact_log2 |
1a6ec070 RK |
3235 | (nonzero_bits (XEXP (SET_SRC (x), 0), |
3236 | GET_MODE (XEXP (SET_SRC (x), 0)))))) | |
3237 | { | |
3238 | enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0)); | |
3239 | ||
3240 | SUBST (SET_SRC (x), | |
f1c6ba8b RK |
3241 | gen_rtx_NEG (mode, |
3242 | gen_rtx_LSHIFTRT (mode, | |
3243 | XEXP (SET_SRC (x), 0), | |
3244 | GEN_INT (pos)))); | |
1a6ec070 RK |
3245 | |
3246 | split = find_split_point (&SET_SRC (x), insn); | |
3247 | if (split && split != &SET_SRC (x)) | |
3248 | return split; | |
3249 | } | |
3250 | break; | |
3251 | ||
230d793d RS |
3252 | case SIGN_EXTEND: |
3253 | inner = XEXP (SET_SRC (x), 0); | |
101c1a3d JL |
3254 | |
3255 | /* We can't optimize if either mode is a partial integer | |
3256 | mode as we don't know how many bits are significant | |
3257 | in those modes. */ | |
3258 | if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT | |
3259 | || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT) | |
3260 | break; | |
3261 | ||
230d793d RS |
3262 | pos = 0; |
3263 | len = GET_MODE_BITSIZE (GET_MODE (inner)); | |
3264 | unsignedp = 0; | |
3265 | break; | |
3266 | ||
3267 | case SIGN_EXTRACT: | |
3268 | case ZERO_EXTRACT: | |
3269 | if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | |
3270 | && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT) | |
3271 | { | |
3272 | inner = XEXP (SET_SRC (x), 0); | |
3273 | len = INTVAL (XEXP (SET_SRC (x), 1)); | |
3274 | pos = INTVAL (XEXP (SET_SRC (x), 2)); | |
3275 | ||
f76b9db2 ILT |
3276 | if (BITS_BIG_ENDIAN) |
3277 | pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos; | |
230d793d RS |
3278 | unsignedp = (code == ZERO_EXTRACT); |
3279 | } | |
3280 | break; | |
e9a25f70 JL |
3281 | |
3282 | default: | |
3283 | break; | |
230d793d RS |
3284 | } |
3285 | ||
3286 | if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner))) | |
3287 | { | |
3288 | enum machine_mode mode = GET_MODE (SET_SRC (x)); | |
3289 | ||
d0ab8cd3 RK |
3290 | /* For unsigned, we have a choice of a shift followed by an |
3291 | AND or two shifts. Use two shifts for field sizes where the | |
3292 | constant might be too large. We assume here that we can | |
3293 | always at least get 8-bit constants in an AND insn, which is | |
3294 | true for every current RISC. */ | |
3295 | ||
3296 | if (unsignedp && len <= 8) | |
230d793d RS |
3297 | { |
3298 | SUBST (SET_SRC (x), | |
f1c6ba8b RK |
3299 | gen_rtx_AND (mode, |
3300 | gen_rtx_LSHIFTRT | |
4de249d9 | 3301 | (mode, gen_lowpart (mode, inner), |
f1c6ba8b RK |
3302 | GEN_INT (pos)), |
3303 | GEN_INT (((HOST_WIDE_INT) 1 << len) - 1))); | |
230d793d | 3304 | |
d0ab8cd3 | 3305 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3306 | if (split && split != &SET_SRC (x)) |
3307 | return split; | |
3308 | } | |
3309 | else | |
3310 | { | |
3311 | SUBST (SET_SRC (x), | |
f1c6ba8b | 3312 | gen_rtx_fmt_ee |
d0ab8cd3 | 3313 | (unsignedp ? LSHIFTRT : ASHIFTRT, mode, |
f1c6ba8b | 3314 | gen_rtx_ASHIFT (mode, |
4de249d9 | 3315 | gen_lowpart (mode, inner), |
f1c6ba8b RK |
3316 | GEN_INT (GET_MODE_BITSIZE (mode) |
3317 | - len - pos)), | |
5f4f0e22 | 3318 | GEN_INT (GET_MODE_BITSIZE (mode) - len))); |
230d793d | 3319 | |
d0ab8cd3 | 3320 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3321 | if (split && split != &SET_SRC (x)) |
3322 | return split; | |
3323 | } | |
3324 | } | |
3325 | ||
3326 | /* See if this is a simple operation with a constant as the second | |
3327 | operand. It might be that this constant is out of range and hence | |
3328 | could be used as a split point. */ | |
ec8e098d | 3329 | if (BINARY_P (SET_SRC (x)) |
230d793d | 3330 | && CONSTANT_P (XEXP (SET_SRC (x), 1)) |
ec8e098d | 3331 | && (OBJECT_P (XEXP (SET_SRC (x), 0)) |
230d793d | 3332 | || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG |
ec8e098d | 3333 | && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0)))))) |
230d793d RS |
3334 | return &XEXP (SET_SRC (x), 1); |
3335 | ||
3336 | /* Finally, see if this is a simple operation with its first operand | |
3337 | not in a register. The operation might require this operand in a | |
3338 | register, so return it as a split point. We can always do this | |
3339 | because if the first operand were another operation, we would have | |
3340 | already found it as a split point. */ | |
ec8e098d | 3341 | if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x))) |
230d793d RS |
3342 | && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode)) |
3343 | return &XEXP (SET_SRC (x), 0); | |
3344 | ||
3345 | return 0; | |
3346 | ||
3347 | case AND: | |
3348 | case IOR: | |
3349 | /* We write NOR as (and (not A) (not B)), but if we don't have a NOR, | |
3350 | it is better to write this as (not (ior A B)) so we can split it. | |
3351 | Similarly for IOR. */ | |
3352 | if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT) | |
3353 | { | |
3354 | SUBST (*loc, | |
f1c6ba8b RK |
3355 | gen_rtx_NOT (GET_MODE (x), |
3356 | gen_rtx_fmt_ee (code == IOR ? AND : IOR, | |
3357 | GET_MODE (x), | |
3358 | XEXP (XEXP (x, 0), 0), | |
3359 | XEXP (XEXP (x, 1), 0)))); | |
d0ab8cd3 | 3360 | return find_split_point (loc, insn); |
230d793d RS |
3361 | } |
3362 | ||
3363 | /* Many RISC machines have a large set of logical insns. If the | |
3364 | second operand is a NOT, put it first so we will try to split the | |
3365 | other operand first. */ | |
3366 | if (GET_CODE (XEXP (x, 1)) == NOT) | |
3367 | { | |
3368 | rtx tem = XEXP (x, 0); | |
3369 | SUBST (XEXP (x, 0), XEXP (x, 1)); | |
3370 | SUBST (XEXP (x, 1), tem); | |
3371 | } | |
3372 | break; | |
e9a25f70 JL |
3373 | |
3374 | default: | |
3375 | break; | |
230d793d RS |
3376 | } |
3377 | ||
3378 | /* Otherwise, select our actions depending on our rtx class. */ | |
3379 | switch (GET_RTX_CLASS (code)) | |
3380 | { | |
ec8e098d PB |
3381 | case RTX_BITFIELD_OPS: /* This is ZERO_EXTRACT and SIGN_EXTRACT. */ |
3382 | case RTX_TERNARY: | |
d0ab8cd3 | 3383 | split = find_split_point (&XEXP (x, 2), insn); |
230d793d RS |
3384 | if (split) |
3385 | return split; | |
0f41302f | 3386 | /* ... fall through ... */ |
ec8e098d PB |
3387 | case RTX_BIN_ARITH: |
3388 | case RTX_COMM_ARITH: | |
3389 | case RTX_COMPARE: | |
3390 | case RTX_COMM_COMPARE: | |
d0ab8cd3 | 3391 | split = find_split_point (&XEXP (x, 1), insn); |
230d793d RS |
3392 | if (split) |
3393 | return split; | |
0f41302f | 3394 | /* ... fall through ... */ |
ec8e098d | 3395 | case RTX_UNARY: |
230d793d RS |
3396 | /* Some machines have (and (shift ...) ...) insns. If X is not |
3397 | an AND, but XEXP (X, 0) is, use it as our split point. */ | |
3398 | if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND) | |
3399 | return &XEXP (x, 0); | |
3400 | ||
d0ab8cd3 | 3401 | split = find_split_point (&XEXP (x, 0), insn); |
230d793d RS |
3402 | if (split) |
3403 | return split; | |
3404 | return loc; | |
230d793d | 3405 | |
ec8e098d PB |
3406 | default: |
3407 | /* Otherwise, we don't have a split point. */ | |
3408 | return 0; | |
3409 | } | |
230d793d RS |
3410 | } |
3411 | \f | |
3412 | /* Throughout X, replace FROM with TO, and return the result. | |
3413 | The result is TO if X is FROM; | |
3414 | otherwise the result is X, but its contents may have been modified. | |
3415 | If they were modified, a record was made in undobuf so that | |
3416 | undo_all will (among other things) return X to its original state. | |
3417 | ||
3418 | If the number of changes necessary is too much to record to undo, | |
3419 | the excess changes are not made, so the result is invalid. | |
3420 | The changes already made can still be undone. | |
3421 | undobuf.num_undo is incremented for such changes, so by testing that | |
3422 | the caller can tell whether the result is valid. | |
3423 | ||
3424 | `n_occurrences' is incremented each time FROM is replaced. | |
663522cb | 3425 | |
da7d8304 | 3426 | IN_DEST is nonzero if we are processing the SET_DEST of a SET. |
230d793d | 3427 | |
da7d8304 KH |
3428 | UNIQUE_COPY is nonzero if each substitution must be unique. We do this |
3429 | by copying if `n_occurrences' is nonzero. */ | |
230d793d RS |
3430 | |
3431 | static rtx | |
79a490a9 | 3432 | subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy) |
230d793d | 3433 | { |
b3694847 | 3434 | enum rtx_code code = GET_CODE (x); |
230d793d | 3435 | enum machine_mode op0_mode = VOIDmode; |
b3694847 SS |
3436 | const char *fmt; |
3437 | int len, i; | |
8079805d | 3438 | rtx new; |
230d793d RS |
3439 | |
3440 | /* Two expressions are equal if they are identical copies of a shared | |
3441 | RTX or if they are both registers with the same register number | |
3442 | and mode. */ | |
3443 | ||
3444 | #define COMBINE_RTX_EQUAL_P(X,Y) \ | |
3445 | ((X) == (Y) \ | |
f8cfc6aa | 3446 | || (REG_P (X) && REG_P (Y) \ |
230d793d RS |
3447 | && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y))) |
3448 | ||
3449 | if (! in_dest && COMBINE_RTX_EQUAL_P (x, from)) | |
3450 | { | |
3451 | n_occurrences++; | |
3452 | return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to); | |
3453 | } | |
3454 | ||
3455 | /* If X and FROM are the same register but different modes, they will | |
663522cb | 3456 | not have been seen as equal above. However, flow.c will make a |
230d793d RS |
3457 | LOG_LINKS entry for that case. If we do nothing, we will try to |
3458 | rerecognize our original insn and, when it succeeds, we will | |
3459 | delete the feeding insn, which is incorrect. | |
3460 | ||
3461 | So force this insn not to match in this (rare) case. */ | |
f8cfc6aa | 3462 | if (! in_dest && code == REG && REG_P (from) |
230d793d | 3463 | && REGNO (x) == REGNO (from)) |
38a448ca | 3464 | return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
3465 | |
3466 | /* If this is an object, we are done unless it is a MEM or LO_SUM, both | |
3467 | of which may contain things that can be combined. */ | |
ec8e098d | 3468 | if (code != MEM && code != LO_SUM && OBJECT_P (x)) |
230d793d RS |
3469 | return x; |
3470 | ||
3471 | /* It is possible to have a subexpression appear twice in the insn. | |
3472 | Suppose that FROM is a register that appears within TO. | |
3473 | Then, after that subexpression has been scanned once by `subst', | |
3474 | the second time it is scanned, TO may be found. If we were | |
3475 | to scan TO here, we would find FROM within it and create a | |
3476 | self-referent rtl structure which is completely wrong. */ | |
3477 | if (COMBINE_RTX_EQUAL_P (x, to)) | |
3478 | return to; | |
3479 | ||
4f4b3679 RH |
3480 | /* Parallel asm_operands need special attention because all of the |
3481 | inputs are shared across the arms. Furthermore, unsharing the | |
3482 | rtl results in recognition failures. Failure to handle this case | |
3483 | specially can result in circular rtl. | |
3484 | ||
3485 | Solve this by doing a normal pass across the first entry of the | |
3486 | parallel, and only processing the SET_DESTs of the subsequent | |
3487 | entries. Ug. */ | |
3488 | ||
3489 | if (code == PARALLEL | |
3490 | && GET_CODE (XVECEXP (x, 0, 0)) == SET | |
3491 | && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS) | |
230d793d | 3492 | { |
4f4b3679 RH |
3493 | new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy); |
3494 | ||
3495 | /* If this substitution failed, this whole thing fails. */ | |
3496 | if (GET_CODE (new) == CLOBBER | |
3497 | && XEXP (new, 0) == const0_rtx) | |
3498 | return new; | |
3499 | ||
3500 | SUBST (XVECEXP (x, 0, 0), new); | |
3501 | ||
3502 | for (i = XVECLEN (x, 0) - 1; i >= 1; i--) | |
230d793d | 3503 | { |
4f4b3679 | 3504 | rtx dest = SET_DEST (XVECEXP (x, 0, i)); |
663522cb | 3505 | |
f8cfc6aa | 3506 | if (!REG_P (dest) |
4f4b3679 RH |
3507 | && GET_CODE (dest) != CC0 |
3508 | && GET_CODE (dest) != PC) | |
230d793d | 3509 | { |
4f4b3679 | 3510 | new = subst (dest, from, to, 0, unique_copy); |
230d793d | 3511 | |
4f4b3679 RH |
3512 | /* If this substitution failed, this whole thing fails. */ |
3513 | if (GET_CODE (new) == CLOBBER | |
3514 | && XEXP (new, 0) == const0_rtx) | |
3515 | return new; | |
230d793d | 3516 | |
4f4b3679 | 3517 | SUBST (SET_DEST (XVECEXP (x, 0, i)), new); |
230d793d RS |
3518 | } |
3519 | } | |
4f4b3679 RH |
3520 | } |
3521 | else | |
3522 | { | |
3523 | len = GET_RTX_LENGTH (code); | |
3524 | fmt = GET_RTX_FORMAT (code); | |
3525 | ||
3526 | /* We don't need to process a SET_DEST that is a register, CC0, | |
3527 | or PC, so set up to skip this common case. All other cases | |
3528 | where we want to suppress replacing something inside a | |
3529 | SET_SRC are handled via the IN_DEST operand. */ | |
3530 | if (code == SET | |
f8cfc6aa | 3531 | && (REG_P (SET_DEST (x)) |
4f4b3679 RH |
3532 | || GET_CODE (SET_DEST (x)) == CC0 |
3533 | || GET_CODE (SET_DEST (x)) == PC)) | |
3534 | fmt = "ie"; | |
3535 | ||
3536 | /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a | |
3537 | constant. */ | |
3538 | if (fmt[0] == 'e') | |
3539 | op0_mode = GET_MODE (XEXP (x, 0)); | |
3540 | ||
3541 | for (i = 0; i < len; i++) | |
230d793d | 3542 | { |
4f4b3679 | 3543 | if (fmt[i] == 'E') |
230d793d | 3544 | { |
b3694847 | 3545 | int j; |
4f4b3679 RH |
3546 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
3547 | { | |
3548 | if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from)) | |
3549 | { | |
3550 | new = (unique_copy && n_occurrences | |
3551 | ? copy_rtx (to) : to); | |
3552 | n_occurrences++; | |
3553 | } | |
3554 | else | |
3555 | { | |
3556 | new = subst (XVECEXP (x, i, j), from, to, 0, | |
3557 | unique_copy); | |
3558 | ||
3559 | /* If this substitution failed, this whole thing | |
3560 | fails. */ | |
3561 | if (GET_CODE (new) == CLOBBER | |
3562 | && XEXP (new, 0) == const0_rtx) | |
3563 | return new; | |
3564 | } | |
3565 | ||
3566 | SUBST (XVECEXP (x, i, j), new); | |
3567 | } | |
3568 | } | |
3569 | else if (fmt[i] == 'e') | |
3570 | { | |
0a33d11e RH |
3571 | /* If this is a register being set, ignore it. */ |
3572 | new = XEXP (x, i); | |
3573 | if (in_dest | |
0a33d11e | 3574 | && i == 0 |
b78b8bd8 JJ |
3575 | && (((code == SUBREG || code == ZERO_EXTRACT) |
3576 | && REG_P (new)) | |
3577 | || code == STRICT_LOW_PART)) | |
0a33d11e RH |
3578 | ; |
3579 | ||
3580 | else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from)) | |
4f4b3679 RH |
3581 | { |
3582 | /* In general, don't install a subreg involving two | |
3583 | modes not tieable. It can worsen register | |
3584 | allocation, and can even make invalid reload | |
3585 | insns, since the reg inside may need to be copied | |
3586 | from in the outside mode, and that may be invalid | |
3587 | if it is an fp reg copied in integer mode. | |
3588 | ||
3589 | We allow two exceptions to this: It is valid if | |
3590 | it is inside another SUBREG and the mode of that | |
3591 | SUBREG and the mode of the inside of TO is | |
3592 | tieable and it is valid if X is a SET that copies | |
3593 | FROM to CC0. */ | |
3594 | ||
3595 | if (GET_CODE (to) == SUBREG | |
3596 | && ! MODES_TIEABLE_P (GET_MODE (to), | |
3597 | GET_MODE (SUBREG_REG (to))) | |
3598 | && ! (code == SUBREG | |
3599 | && MODES_TIEABLE_P (GET_MODE (x), | |
3600 | GET_MODE (SUBREG_REG (to)))) | |
42301240 | 3601 | #ifdef HAVE_cc0 |
4f4b3679 | 3602 | && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx) |
42301240 | 3603 | #endif |
4f4b3679 RH |
3604 | ) |
3605 | return gen_rtx_CLOBBER (VOIDmode, const0_rtx); | |
42301240 | 3606 | |
cff9f8d5 | 3607 | #ifdef CANNOT_CHANGE_MODE_CLASS |
ed8afe3a | 3608 | if (code == SUBREG |
f8cfc6aa | 3609 | && REG_P (to) |
ed8afe3a | 3610 | && REGNO (to) < FIRST_PSEUDO_REGISTER |
cff9f8d5 AH |
3611 | && REG_CANNOT_CHANGE_MODE_P (REGNO (to), |
3612 | GET_MODE (to), | |
3613 | GET_MODE (x))) | |
ed8afe3a GK |
3614 | return gen_rtx_CLOBBER (VOIDmode, const0_rtx); |
3615 | #endif | |
3616 | ||
4f4b3679 RH |
3617 | new = (unique_copy && n_occurrences ? copy_rtx (to) : to); |
3618 | n_occurrences++; | |
3619 | } | |
3620 | else | |
3621 | /* If we are in a SET_DEST, suppress most cases unless we | |
3622 | have gone inside a MEM, in which case we want to | |
3623 | simplify the address. We assume here that things that | |
3624 | are actually part of the destination have their inner | |
663522cb | 3625 | parts in the first expression. This is true for SUBREG, |
4f4b3679 RH |
3626 | STRICT_LOW_PART, and ZERO_EXTRACT, which are the only |
3627 | things aside from REG and MEM that should appear in a | |
3628 | SET_DEST. */ | |
3629 | new = subst (XEXP (x, i), from, to, | |
3630 | (((in_dest | |
3631 | && (code == SUBREG || code == STRICT_LOW_PART | |
3632 | || code == ZERO_EXTRACT)) | |
3633 | || code == SET) | |
3634 | && i == 0), unique_copy); | |
3635 | ||
3636 | /* If we found that we will have to reject this combination, | |
3637 | indicate that by returning the CLOBBER ourselves, rather than | |
3638 | an expression containing it. This will speed things up as | |
3639 | well as prevent accidents where two CLOBBERs are considered | |
3640 | to be equal, thus producing an incorrect simplification. */ | |
3641 | ||
3642 | if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx) | |
3643 | return new; | |
3644 | ||
cc8c96fd RS |
3645 | if (GET_CODE (x) == SUBREG |
3646 | && (GET_CODE (new) == CONST_INT | |
3647 | || GET_CODE (new) == CONST_DOUBLE)) | |
4161da12 | 3648 | { |
b0dd4808 | 3649 | enum machine_mode mode = GET_MODE (x); |
2e676d78 | 3650 | |
4161da12 AO |
3651 | x = simplify_subreg (GET_MODE (x), new, |
3652 | GET_MODE (SUBREG_REG (x)), | |
3653 | SUBREG_BYTE (x)); | |
3654 | if (! x) | |
b0dd4808 | 3655 | x = gen_rtx_CLOBBER (mode, const0_rtx); |
4161da12 AO |
3656 | } |
3657 | else if (GET_CODE (new) == CONST_INT | |
3658 | && GET_CODE (x) == ZERO_EXTEND) | |
3659 | { | |
3660 | x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | |
3661 | new, GET_MODE (XEXP (x, 0))); | |
341c100f | 3662 | gcc_assert (x); |
4161da12 AO |
3663 | } |
3664 | else | |
3665 | SUBST (XEXP (x, i), new); | |
230d793d | 3666 | } |
230d793d RS |
3667 | } |
3668 | } | |
3669 | ||
8079805d RK |
3670 | /* Try to simplify X. If the simplification changed the code, it is likely |
3671 | that further simplification will help, so loop, but limit the number | |
3672 | of repetitions that will be performed. */ | |
3673 | ||
3674 | for (i = 0; i < 4; i++) | |
3675 | { | |
3676 | /* If X is sufficiently simple, don't bother trying to do anything | |
3677 | with it. */ | |
3678 | if (code != CONST_INT && code != REG && code != CLOBBER) | |
6621d78e | 3679 | x = combine_simplify_rtx (x, op0_mode, in_dest); |
d0ab8cd3 | 3680 | |
8079805d RK |
3681 | if (GET_CODE (x) == code) |
3682 | break; | |
d0ab8cd3 | 3683 | |
8079805d | 3684 | code = GET_CODE (x); |
eeb43d32 | 3685 | |
8079805d RK |
3686 | /* We no longer know the original mode of operand 0 since we |
3687 | have changed the form of X) */ | |
3688 | op0_mode = VOIDmode; | |
3689 | } | |
eeb43d32 | 3690 | |
8079805d RK |
3691 | return x; |
3692 | } | |
3693 | \f | |
3694 | /* Simplify X, a piece of RTL. We just operate on the expression at the | |
3695 | outer level; call `subst' to simplify recursively. Return the new | |
3696 | expression. | |
3697 | ||
6621d78e PB |
3698 | OP0_MODE is the original mode of XEXP (x, 0). IN_DEST is nonzero |
3699 | if we are inside a SET_DEST. */ | |
eeb43d32 | 3700 | |
8079805d | 3701 | static rtx |
6621d78e | 3702 | combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest) |
8079805d RK |
3703 | { |
3704 | enum rtx_code code = GET_CODE (x); | |
3705 | enum machine_mode mode = GET_MODE (x); | |
3706 | rtx temp; | |
9a915772 | 3707 | rtx reversed; |
8079805d | 3708 | int i; |
d0ab8cd3 | 3709 | |
230d793d RS |
3710 | /* If this is a commutative operation, put a constant last and a complex |
3711 | expression first. We don't need to do this for comparisons here. */ | |
ec8e098d | 3712 | if (COMMUTATIVE_ARITH_P (x) |
e5c56fd9 | 3713 | && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))) |
230d793d RS |
3714 | { |
3715 | temp = XEXP (x, 0); | |
3716 | SUBST (XEXP (x, 0), XEXP (x, 1)); | |
3717 | SUBST (XEXP (x, 1), temp); | |
3718 | } | |
3719 | ||
22609cbf RK |
3720 | /* If this is a PLUS, MINUS, or MULT, and the first operand is the |
3721 | sign extension of a PLUS with a constant, reverse the order of the sign | |
3722 | extension and the addition. Note that this not the same as the original | |
3723 | code, but overflow is undefined for signed values. Also note that the | |
3724 | PLUS will have been partially moved "inside" the sign-extension, so that | |
3725 | the first operand of X will really look like: | |
3726 | (ashiftrt (plus (ashift A C4) C5) C4). | |
3727 | We convert this to | |
3728 | (plus (ashiftrt (ashift A C4) C2) C4) | |
3729 | and replace the first operand of X with that expression. Later parts | |
3730 | of this function may simplify the expression further. | |
3731 | ||
3732 | For example, if we start with (mult (sign_extend (plus A C1)) C2), | |
3733 | we swap the SIGN_EXTEND and PLUS. Later code will apply the | |
3734 | distributive law to produce (plus (mult (sign_extend X) C1) C3). | |
3735 | ||
3736 | We do this to simplify address expressions. */ | |
3737 | ||
3738 | if ((code == PLUS || code == MINUS || code == MULT) | |
3739 | && GET_CODE (XEXP (x, 0)) == ASHIFTRT | |
3740 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS | |
3741 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT | |
3742 | && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT | |
3743 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3744 | && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1) | |
3745 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT | |
3746 | && (temp = simplify_binary_operation (ASHIFTRT, mode, | |
3747 | XEXP (XEXP (XEXP (x, 0), 0), 1), | |
3748 | XEXP (XEXP (x, 0), 1))) != 0) | |
3749 | { | |
3750 | rtx new | |
3751 | = simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
3752 | XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0), | |
3753 | INTVAL (XEXP (XEXP (x, 0), 1))); | |
3754 | ||
3755 | new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new, | |
3756 | INTVAL (XEXP (XEXP (x, 0), 1))); | |
3757 | ||
1999435c | 3758 | SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp)); |
22609cbf RK |
3759 | } |
3760 | ||
663522cb | 3761 | /* If this is a simple operation applied to an IF_THEN_ELSE, try |
d0ab8cd3 | 3762 | applying it to the arms of the IF_THEN_ELSE. This often simplifies |
abe6e52f RK |
3763 | things. Check for cases where both arms are testing the same |
3764 | condition. | |
3765 | ||
3766 | Don't do anything if all operands are very simple. */ | |
3767 | ||
ec8e098d PB |
3768 | if ((BINARY_P (x) |
3769 | && ((!OBJECT_P (XEXP (x, 0)) | |
abe6e52f | 3770 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG |
ec8e098d PB |
3771 | && OBJECT_P (SUBREG_REG (XEXP (x, 0))))) |
3772 | || (!OBJECT_P (XEXP (x, 1)) | |
abe6e52f | 3773 | && ! (GET_CODE (XEXP (x, 1)) == SUBREG |
ec8e098d PB |
3774 | && OBJECT_P (SUBREG_REG (XEXP (x, 1))))))) |
3775 | || (UNARY_P (x) | |
3776 | && (!OBJECT_P (XEXP (x, 0)) | |
abe6e52f | 3777 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG |
ec8e098d | 3778 | && OBJECT_P (SUBREG_REG (XEXP (x, 0))))))) |
d0ab8cd3 | 3779 | { |
d6edb99e | 3780 | rtx cond, true_rtx, false_rtx; |
abe6e52f | 3781 | |
d6edb99e | 3782 | cond = if_then_else_cond (x, &true_rtx, &false_rtx); |
0802d516 RK |
3783 | if (cond != 0 |
3784 | /* If everything is a comparison, what we have is highly unlikely | |
3785 | to be simpler, so don't use it. */ | |
ec8e098d PB |
3786 | && ! (COMPARISON_P (x) |
3787 | && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx)))) | |
abe6e52f RK |
3788 | { |
3789 | rtx cop1 = const0_rtx; | |
3790 | enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1); | |
3791 | ||
ec8e098d | 3792 | if (cond_code == NE && COMPARISON_P (cond)) |
15448afc RK |
3793 | return x; |
3794 | ||
663522cb | 3795 | /* Simplify the alternative arms; this may collapse the true and |
c6279378 UW |
3796 | false arms to store-flag values. Be careful to use copy_rtx |
3797 | here since true_rtx or false_rtx might share RTL with x as a | |
3798 | result of the if_then_else_cond call above. */ | |
3799 | true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0); | |
3800 | false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0); | |
9210df58 | 3801 | |
d6edb99e | 3802 | /* If true_rtx and false_rtx are not general_operands, an if_then_else |
085f1714 | 3803 | is unlikely to be simpler. */ |
d6edb99e ZW |
3804 | if (general_operand (true_rtx, VOIDmode) |
3805 | && general_operand (false_rtx, VOIDmode)) | |
085f1714 | 3806 | { |
434c87d4 JH |
3807 | enum rtx_code reversed; |
3808 | ||
085f1714 RH |
3809 | /* Restarting if we generate a store-flag expression will cause |
3810 | us to loop. Just drop through in this case. */ | |
3811 | ||
3812 | /* If the result values are STORE_FLAG_VALUE and zero, we can | |
3813 | just make the comparison operation. */ | |
d6edb99e | 3814 | if (true_rtx == const_true_rtx && false_rtx == const0_rtx) |
1999435c | 3815 | x = gen_binary (cond_code, mode, cond, cop1); |
fa4e13e0 | 3816 | else if (true_rtx == const0_rtx && false_rtx == const_true_rtx |
434c87d4 | 3817 | && ((reversed = reversed_comparison_code_parts |
79a490a9 | 3818 | (cond_code, cond, cop1, NULL)) |
434c87d4 | 3819 | != UNKNOWN)) |
1999435c | 3820 | x = gen_binary (reversed, mode, cond, cop1); |
085f1714 RH |
3821 | |
3822 | /* Likewise, we can make the negate of a comparison operation | |
3823 | if the result values are - STORE_FLAG_VALUE and zero. */ | |
d6edb99e ZW |
3824 | else if (GET_CODE (true_rtx) == CONST_INT |
3825 | && INTVAL (true_rtx) == - STORE_FLAG_VALUE | |
3826 | && false_rtx == const0_rtx) | |
f1c6ba8b | 3827 | x = simplify_gen_unary (NEG, mode, |
1999435c PB |
3828 | gen_binary (cond_code, mode, cond, |
3829 | cop1), | |
f1c6ba8b | 3830 | mode); |
d6edb99e ZW |
3831 | else if (GET_CODE (false_rtx) == CONST_INT |
3832 | && INTVAL (false_rtx) == - STORE_FLAG_VALUE | |
434c87d4 JH |
3833 | && true_rtx == const0_rtx |
3834 | && ((reversed = reversed_comparison_code_parts | |
79a490a9 | 3835 | (cond_code, cond, cop1, NULL)) |
434c87d4 | 3836 | != UNKNOWN)) |
f1c6ba8b | 3837 | x = simplify_gen_unary (NEG, mode, |
1999435c PB |
3838 | gen_binary (reversed, mode, |
3839 | cond, cop1), | |
f1c6ba8b | 3840 | mode); |
085f1714 RH |
3841 | else |
3842 | return gen_rtx_IF_THEN_ELSE (mode, | |
1999435c PB |
3843 | gen_binary (cond_code, VOIDmode, |
3844 | cond, cop1), | |
d6edb99e | 3845 | true_rtx, false_rtx); |
5109d49f | 3846 | |
085f1714 RH |
3847 | code = GET_CODE (x); |
3848 | op0_mode = VOIDmode; | |
3849 | } | |
abe6e52f | 3850 | } |
d0ab8cd3 RK |
3851 | } |
3852 | ||
230d793d RS |
3853 | /* Try to fold this expression in case we have constants that weren't |
3854 | present before. */ | |
3855 | temp = 0; | |
3856 | switch (GET_RTX_CLASS (code)) | |
3857 | { | |
ec8e098d | 3858 | case RTX_UNARY: |
c0657872 RS |
3859 | if (op0_mode == VOIDmode) |
3860 | op0_mode = GET_MODE (XEXP (x, 0)); | |
230d793d RS |
3861 | temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode); |
3862 | break; | |
ec8e098d PB |
3863 | case RTX_COMPARE: |
3864 | case RTX_COMM_COMPARE: | |
47b1e19b JH |
3865 | { |
3866 | enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0)); | |
3867 | if (cmp_mode == VOIDmode) | |
1cac8785 DD |
3868 | { |
3869 | cmp_mode = GET_MODE (XEXP (x, 1)); | |
3870 | if (cmp_mode == VOIDmode) | |
3871 | cmp_mode = op0_mode; | |
3872 | } | |
7ce3e360 | 3873 | temp = simplify_relational_operation (code, mode, cmp_mode, |
47b1e19b JH |
3874 | XEXP (x, 0), XEXP (x, 1)); |
3875 | } | |
230d793d | 3876 | break; |
ec8e098d PB |
3877 | case RTX_COMM_ARITH: |
3878 | case RTX_BIN_ARITH: | |
230d793d RS |
3879 | temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1)); |
3880 | break; | |
ec8e098d PB |
3881 | case RTX_BITFIELD_OPS: |
3882 | case RTX_TERNARY: | |
230d793d RS |
3883 | temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0), |
3884 | XEXP (x, 1), XEXP (x, 2)); | |
3885 | break; | |
ec8e098d PB |
3886 | default: |
3887 | break; | |
230d793d RS |
3888 | } |
3889 | ||
3890 | if (temp) | |
4531c1c7 DN |
3891 | { |
3892 | x = temp; | |
3893 | code = GET_CODE (temp); | |
3894 | op0_mode = VOIDmode; | |
3895 | mode = GET_MODE (temp); | |
3896 | } | |
230d793d | 3897 | |
230d793d | 3898 | /* First see if we can apply the inverse distributive law. */ |
224eeff2 RK |
3899 | if (code == PLUS || code == MINUS |
3900 | || code == AND || code == IOR || code == XOR) | |
230d793d RS |
3901 | { |
3902 | x = apply_distributive_law (x); | |
3903 | code = GET_CODE (x); | |
6e20204f | 3904 | op0_mode = VOIDmode; |
230d793d RS |
3905 | } |
3906 | ||
3907 | /* If CODE is an associative operation not otherwise handled, see if we | |
3908 | can associate some operands. This can win if they are constants or | |
e0e08ac2 | 3909 | if they are logically related (i.e. (a & b) & a). */ |
493efd37 TM |
3910 | if ((code == PLUS || code == MINUS || code == MULT || code == DIV |
3911 | || code == AND || code == IOR || code == XOR | |
230d793d | 3912 | || code == SMAX || code == SMIN || code == UMAX || code == UMIN) |
493efd37 | 3913 | && ((INTEGRAL_MODE_P (mode) && code != DIV) |
4ba5f925 | 3914 | || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode)))) |
230d793d RS |
3915 | { |
3916 | if (GET_CODE (XEXP (x, 0)) == code) | |
3917 | { | |
3918 | rtx other = XEXP (XEXP (x, 0), 0); | |
3919 | rtx inner_op0 = XEXP (XEXP (x, 0), 1); | |
3920 | rtx inner_op1 = XEXP (x, 1); | |
3921 | rtx inner; | |
663522cb | 3922 | |
230d793d RS |
3923 | /* Make sure we pass the constant operand if any as the second |
3924 | one if this is a commutative operation. */ | |
ec8e098d | 3925 | if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x)) |
230d793d RS |
3926 | { |
3927 | rtx tem = inner_op0; | |
3928 | inner_op0 = inner_op1; | |
3929 | inner_op1 = tem; | |
3930 | } | |
3931 | inner = simplify_binary_operation (code == MINUS ? PLUS | |
3932 | : code == DIV ? MULT | |
230d793d RS |
3933 | : code, |
3934 | mode, inner_op0, inner_op1); | |
3935 | ||
3936 | /* For commutative operations, try the other pair if that one | |
3937 | didn't simplify. */ | |
ec8e098d | 3938 | if (inner == 0 && COMMUTATIVE_ARITH_P (x)) |
230d793d RS |
3939 | { |
3940 | other = XEXP (XEXP (x, 0), 1); | |
3941 | inner = simplify_binary_operation (code, mode, | |
3942 | XEXP (XEXP (x, 0), 0), | |
3943 | XEXP (x, 1)); | |
3944 | } | |
3945 | ||
3946 | if (inner) | |
1999435c | 3947 | return gen_binary (code, mode, other, inner); |
230d793d RS |
3948 | } |
3949 | } | |
3950 | ||
3951 | /* A little bit of algebraic simplification here. */ | |
3952 | switch (code) | |
3953 | { | |
3954 | case MEM: | |
3955 | /* Ensure that our address has any ASHIFTs converted to MULT in case | |
3956 | address-recognizing predicates are called later. */ | |
3957 | temp = make_compound_operation (XEXP (x, 0), MEM); | |
3958 | SUBST (XEXP (x, 0), temp); | |
3959 | break; | |
3960 | ||
3961 | case SUBREG: | |
eea50aa0 JH |
3962 | if (op0_mode == VOIDmode) |
3963 | op0_mode = GET_MODE (SUBREG_REG (x)); | |
230d793d | 3964 | |
4de249d9 | 3965 | /* See if this can be moved to simplify_subreg. */ |
3c99d5ff | 3966 | if (CONSTANT_P (SUBREG_REG (x)) |
156755ac | 3967 | && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x) |
4de249d9 | 3968 | /* Don't call gen_lowpart if the inner mode |
156755ac JJ |
3969 | is VOIDmode and we cannot simplify it, as SUBREG without |
3970 | inner mode is invalid. */ | |
3971 | && (GET_MODE (SUBREG_REG (x)) != VOIDmode | |
3972 | || gen_lowpart_common (mode, SUBREG_REG (x)))) | |
4de249d9 | 3973 | return gen_lowpart (mode, SUBREG_REG (x)); |
230d793d | 3974 | |
a13287e1 AM |
3975 | if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC) |
3976 | break; | |
eea50aa0 JH |
3977 | { |
3978 | rtx temp; | |
3979 | temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode, | |
23190837 | 3980 | SUBREG_BYTE (x)); |
eea50aa0 JH |
3981 | if (temp) |
3982 | return temp; | |
3983 | } | |
b65c1b5b | 3984 | |
30984c57 | 3985 | /* Don't change the mode of the MEM if that would change the meaning |
bf08edc1 HPN |
3986 | of the address. Similarly, don't allow widening, as that may |
3987 | access memory outside the defined object or using an address | |
3988 | that is invalid for a wider mode. */ | |
3c0cb5de | 3989 | if (MEM_P (SUBREG_REG (x)) |
30984c57 | 3990 | && (MEM_VOLATILE_P (SUBREG_REG (x)) |
bf08edc1 HPN |
3991 | || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0)) |
3992 | || (GET_MODE_SIZE (mode) | |
3993 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))) | |
30984c57 JJ |
3994 | return gen_rtx_CLOBBER (mode, const0_rtx); |
3995 | ||
87e3e0c1 RK |
3996 | /* Note that we cannot do any narrowing for non-constants since |
3997 | we might have been counting on using the fact that some bits were | |
3998 | zero. We now do this in the SET. */ | |
3999 | ||
230d793d RS |
4000 | break; |
4001 | ||
4002 | case NOT: | |
230d793d RS |
4003 | if (GET_CODE (XEXP (x, 0)) == SUBREG |
4004 | && subreg_lowpart_p (XEXP (x, 0)) | |
4005 | && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) | |
4006 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0))))) | |
4007 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT | |
4008 | && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx) | |
4009 | { | |
4010 | enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0))); | |
4011 | ||
38a448ca | 4012 | x = gen_rtx_ROTATE (inner_mode, |
f1c6ba8b RK |
4013 | simplify_gen_unary (NOT, inner_mode, const1_rtx, |
4014 | inner_mode), | |
38a448ca | 4015 | XEXP (SUBREG_REG (XEXP (x, 0)), 1)); |
4de249d9 | 4016 | return gen_lowpart (mode, x); |
230d793d | 4017 | } |
663522cb | 4018 | |
230d793d | 4019 | /* Apply De Morgan's laws to reduce number of patterns for machines |
23190837 AJ |
4020 | with negating logical insns (and-not, nand, etc.). If result has |
4021 | only one NOT, put it first, since that is how the patterns are | |
4022 | coded. */ | |
230d793d RS |
4023 | |
4024 | if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND) | |
23190837 | 4025 | { |
663522cb | 4026 | rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1); |
5bd60ce6 | 4027 | enum machine_mode op_mode; |
230d793d | 4028 | |
5bd60ce6 | 4029 | op_mode = GET_MODE (in1); |
f1c6ba8b | 4030 | in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode); |
230d793d | 4031 | |
5bd60ce6 RH |
4032 | op_mode = GET_MODE (in2); |
4033 | if (op_mode == VOIDmode) | |
4034 | op_mode = mode; | |
f1c6ba8b | 4035 | in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode); |
663522cb | 4036 | |
5bd60ce6 | 4037 | if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT) |
663522cb KH |
4038 | { |
4039 | rtx tem = in2; | |
4040 | in2 = in1; in1 = tem; | |
4041 | } | |
4042 | ||
f1c6ba8b RK |
4043 | return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR, |
4044 | mode, in1, in2); | |
663522cb | 4045 | } |
230d793d RS |
4046 | break; |
4047 | ||
4048 | case NEG: | |
0f41302f | 4049 | /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */ |
4f61b3b7 RS |
4050 | if (GET_CODE (XEXP (x, 0)) == XOR |
4051 | && XEXP (XEXP (x, 0), 1) == const1_rtx | |
951553af | 4052 | && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1) |
1999435c | 4053 | return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx); |
d0ab8cd3 | 4054 | |
230d793d RS |
4055 | temp = expand_compound_operation (XEXP (x, 0)); |
4056 | ||
4057 | /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be | |
23190837 | 4058 | replaced by (lshiftrt X C). This will convert |
230d793d RS |
4059 | (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */ |
4060 | ||
4061 | if (GET_CODE (temp) == ASHIFTRT | |
4062 | && GET_CODE (XEXP (temp, 1)) == CONST_INT | |
4063 | && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1) | |
8079805d RK |
4064 | return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0), |
4065 | INTVAL (XEXP (temp, 1))); | |
230d793d | 4066 | |
951553af | 4067 | /* If X has only a single bit that might be nonzero, say, bit I, convert |
230d793d RS |
4068 | (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of |
4069 | MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to | |
4070 | (sign_extract X 1 Y). But only do this if TEMP isn't a register | |
4071 | or a SUBREG of one since we'd be making the expression more | |
4072 | complex if it was just a register. */ | |
4073 | ||
f8cfc6aa | 4074 | if (!REG_P (temp) |
230d793d | 4075 | && ! (GET_CODE (temp) == SUBREG |
f8cfc6aa | 4076 | && REG_P (SUBREG_REG (temp))) |
951553af | 4077 | && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0) |
230d793d RS |
4078 | { |
4079 | rtx temp1 = simplify_shift_const | |
5f4f0e22 CH |
4080 | (NULL_RTX, ASHIFTRT, mode, |
4081 | simplify_shift_const (NULL_RTX, ASHIFT, mode, temp, | |
230d793d RS |
4082 | GET_MODE_BITSIZE (mode) - 1 - i), |
4083 | GET_MODE_BITSIZE (mode) - 1 - i); | |
4084 | ||
4085 | /* If all we did was surround TEMP with the two shifts, we | |
4086 | haven't improved anything, so don't use it. Otherwise, | |
4087 | we are better off with TEMP1. */ | |
4088 | if (GET_CODE (temp1) != ASHIFTRT | |
4089 | || GET_CODE (XEXP (temp1, 0)) != ASHIFT | |
4090 | || XEXP (XEXP (temp1, 0), 0) != temp) | |
8079805d | 4091 | return temp1; |
230d793d RS |
4092 | } |
4093 | break; | |
4094 | ||
2ca9ae17 | 4095 | case TRUNCATE: |
e30fb98f JL |
4096 | /* We can't handle truncation to a partial integer mode here |
4097 | because we don't know the real bitsize of the partial | |
4098 | integer mode. */ | |
4099 | if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT) | |
4100 | break; | |
4101 | ||
80608e27 JL |
4102 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
4103 | && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), | |
4104 | GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))) | |
2ca9ae17 JW |
4105 | SUBST (XEXP (x, 0), |
4106 | force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)), | |
4107 | GET_MODE_MASK (mode), NULL_RTX, 0)); | |
0f13a422 ILT |
4108 | |
4109 | /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */ | |
4110 | if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND | |
4111 | || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND) | |
4112 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode) | |
4113 | return XEXP (XEXP (x, 0), 0); | |
4114 | ||
4115 | /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is | |
4116 | (OP:SI foo:SI) if OP is NEG or ABS. */ | |
4117 | if ((GET_CODE (XEXP (x, 0)) == ABS | |
4118 | || GET_CODE (XEXP (x, 0)) == NEG) | |
4119 | && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND | |
4120 | || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND) | |
4121 | && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode) | |
f1c6ba8b RK |
4122 | return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode, |
4123 | XEXP (XEXP (XEXP (x, 0), 0), 0), mode); | |
0f13a422 ILT |
4124 | |
4125 | /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is | |
4126 | (truncate:SI x). */ | |
4127 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
4128 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE | |
4129 | && subreg_lowpart_p (XEXP (x, 0))) | |
4130 | return SUBREG_REG (XEXP (x, 0)); | |
4131 | ||
4132 | /* If we know that the value is already truncated, we can | |
14a774a9 RK |
4133 | replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION |
4134 | is nonzero for the corresponding modes. But don't do this | |
4135 | for an (LSHIFTRT (MULT ...)) since this will cause problems | |
4136 | with the umulXi3_highpart patterns. */ | |
6a992214 JL |
4137 | if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), |
4138 | GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) | |
4139 | && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
26c34780 | 4140 | >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1) |
14a774a9 | 4141 | && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT |
23190837 | 4142 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT)) |
4de249d9 | 4143 | return gen_lowpart (mode, XEXP (x, 0)); |
0f13a422 ILT |
4144 | |
4145 | /* A truncate of a comparison can be replaced with a subreg if | |
4146 | STORE_FLAG_VALUE permits. This is like the previous test, | |
4147 | but it works even if the comparison is done in a mode larger | |
4148 | than HOST_BITS_PER_WIDE_INT. */ | |
4149 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
ec8e098d | 4150 | && COMPARISON_P (XEXP (x, 0)) |
663522cb | 4151 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0) |
4de249d9 | 4152 | return gen_lowpart (mode, XEXP (x, 0)); |
0f13a422 ILT |
4153 | |
4154 | /* Similarly, a truncate of a register whose value is a | |
4155 | comparison can be replaced with a subreg if STORE_FLAG_VALUE | |
4156 | permits. */ | |
4157 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
663522cb | 4158 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0 |
0f13a422 | 4159 | && (temp = get_last_value (XEXP (x, 0))) |
ec8e098d | 4160 | && COMPARISON_P (temp)) |
4de249d9 | 4161 | return gen_lowpart (mode, XEXP (x, 0)); |
0f13a422 | 4162 | |
2ca9ae17 JW |
4163 | break; |
4164 | ||
230d793d RS |
4165 | case FLOAT_TRUNCATE: |
4166 | /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */ | |
4167 | if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND | |
4168 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode) | |
663522cb | 4169 | return XEXP (XEXP (x, 0), 0); |
4635f748 | 4170 | |
73a39fc4 EC |
4171 | /* (float_truncate:SF (float_truncate:DF foo:XF)) |
4172 | = (float_truncate:SF foo:XF). | |
e0bb17a8 | 4173 | This may eliminate double rounding, so it is unsafe. |
949824fe | 4174 | |
73a39fc4 EC |
4175 | (float_truncate:SF (float_extend:XF foo:DF)) |
4176 | = (float_truncate:SF foo:DF). | |
949824fe | 4177 | |
73a39fc4 | 4178 | (float_truncate:DF (float_extend:XF foo:SF)) |
3dc575ff | 4179 | = (float_extend:SF foo:DF). */ |
949824fe JH |
4180 | if ((GET_CODE (XEXP (x, 0)) == FLOAT_TRUNCATE |
4181 | && flag_unsafe_math_optimizations) | |
4182 | || GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND) | |
4183 | return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0), | |
79a490a9 AJ |
4184 | 0))) |
4185 | > GET_MODE_SIZE (mode) | |
949824fe | 4186 | ? FLOAT_TRUNCATE : FLOAT_EXTEND, |
79a490a9 | 4187 | mode, |
cb119f82 | 4188 | XEXP (XEXP (x, 0), 0), mode); |
949824fe JH |
4189 | |
4190 | /* (float_truncate (float x)) is (float x) */ | |
4191 | if (GET_CODE (XEXP (x, 0)) == FLOAT | |
4192 | && (flag_unsafe_math_optimizations | |
4193 | || ((unsigned)significand_size (GET_MODE (XEXP (x, 0))) | |
4194 | >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0))) | |
4195 | - num_sign_bit_copies (XEXP (XEXP (x, 0), 0), | |
4196 | GET_MODE (XEXP (XEXP (x, 0), 0))))))) | |
4197 | return simplify_gen_unary (FLOAT, mode, | |
4198 | XEXP (XEXP (x, 0), 0), | |
4199 | GET_MODE (XEXP (XEXP (x, 0), 0))); | |
4200 | ||
4635f748 RK |
4201 | /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is |
4202 | (OP:SF foo:SF) if OP is NEG or ABS. */ | |
4203 | if ((GET_CODE (XEXP (x, 0)) == ABS | |
4204 | || GET_CODE (XEXP (x, 0)) == NEG) | |
4205 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND | |
4206 | && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode) | |
f1c6ba8b RK |
4207 | return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode, |
4208 | XEXP (XEXP (XEXP (x, 0), 0), 0), mode); | |
1d12df72 RK |
4209 | |
4210 | /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0)) | |
4211 | is (float_truncate:SF x). */ | |
4212 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
4213 | && subreg_lowpart_p (XEXP (x, 0)) | |
4214 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE) | |
4215 | return SUBREG_REG (XEXP (x, 0)); | |
663522cb | 4216 | break; |
949824fe JH |
4217 | case FLOAT_EXTEND: |
4218 | /* (float_extend (float_extend x)) is (float_extend x) | |
73a39fc4 | 4219 | |
949824fe | 4220 | (float_extend (float x)) is (float x) assuming that double |
73a39fc4 | 4221 | rounding can't happen. |
949824fe JH |
4222 | */ |
4223 | if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND | |
4224 | || (GET_CODE (XEXP (x, 0)) == FLOAT | |
4225 | && ((unsigned)significand_size (GET_MODE (XEXP (x, 0))) | |
4226 | >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0))) | |
4227 | - num_sign_bit_copies (XEXP (XEXP (x, 0), 0), | |
4228 | GET_MODE (XEXP (XEXP (x, 0), 0))))))) | |
4229 | return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode, | |
4230 | XEXP (XEXP (x, 0), 0), | |
4231 | GET_MODE (XEXP (XEXP (x, 0), 0))); | |
230d793d | 4232 | |
949824fe | 4233 | break; |
230d793d RS |
4234 | #ifdef HAVE_cc0 |
4235 | case COMPARE: | |
4236 | /* Convert (compare FOO (const_int 0)) to FOO unless we aren't | |
4237 | using cc0, in which case we want to leave it as a COMPARE | |
4238 | so we can distinguish it from a register-register-copy. */ | |
4239 | if (XEXP (x, 1) == const0_rtx) | |
4240 | return XEXP (x, 0); | |
4241 | ||
71925bc0 RS |
4242 | /* x - 0 is the same as x unless x's mode has signed zeros and |
4243 | allows rounding towards -infinity. Under those conditions, | |
4244 | 0 - 0 is -0. */ | |
4245 | if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0))) | |
4246 | && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0)))) | |
230d793d RS |
4247 | && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0)))) |
4248 | return XEXP (x, 0); | |
4249 | break; | |
4250 | #endif | |
4251 | ||
4252 | case CONST: | |
4253 | /* (const (const X)) can become (const X). Do it this way rather than | |
4254 | returning the inner CONST since CONST can be shared with a | |
4255 | REG_EQUAL note. */ | |
4256 | if (GET_CODE (XEXP (x, 0)) == CONST) | |
4257 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4258 | break; | |
4259 | ||
4260 | #ifdef HAVE_lo_sum | |
4261 | case LO_SUM: | |
4262 | /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we | |
4263 | can add in an offset. find_split_point will split this address up | |
4264 | again if it doesn't match. */ | |
4265 | if (GET_CODE (XEXP (x, 0)) == HIGH | |
4266 | && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))) | |
4267 | return XEXP (x, 1); | |
4268 | break; | |
4269 | #endif | |
4270 | ||
4271 | case PLUS: | |
16823694 GK |
4272 | /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)). |
4273 | */ | |
73a39fc4 | 4274 | if (GET_CODE (XEXP (x, 0)) == MULT |
16823694 GK |
4275 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG) |
4276 | { | |
4277 | rtx in1, in2; | |
73a39fc4 | 4278 | |
16823694 GK |
4279 | in1 = XEXP (XEXP (XEXP (x, 0), 0), 0); |
4280 | in2 = XEXP (XEXP (x, 0), 1); | |
1999435c PB |
4281 | return gen_binary (MINUS, mode, XEXP (x, 1), |
4282 | gen_binary (MULT, mode, in1, in2)); | |
16823694 GK |
4283 | } |
4284 | ||
230d793d RS |
4285 | /* If we have (plus (plus (A const) B)), associate it so that CONST is |
4286 | outermost. That's because that's the way indexed addresses are | |
4287 | supposed to appear. This code used to check many more cases, but | |
4288 | they are now checked elsewhere. */ | |
4289 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
4290 | && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1))) | |
1999435c PB |
4291 | return gen_binary (PLUS, mode, |
4292 | gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), | |
4293 | XEXP (x, 1)), | |
4294 | XEXP (XEXP (x, 0), 1)); | |
230d793d RS |
4295 | |
4296 | /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>) | |
4297 | when c is (const_int (pow2 + 1) / 2) is a sign extension of a | |
4298 | bit-field and can be replaced by either a sign_extend or a | |
e6380233 JL |
4299 | sign_extract. The `and' may be a zero_extend and the two |
4300 | <c>, -<c> constants may be reversed. */ | |
230d793d RS |
4301 | if (GET_CODE (XEXP (x, 0)) == XOR |
4302 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
4303 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
663522cb | 4304 | && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1)) |
e6380233 JL |
4305 | && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0 |
4306 | || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) | |
5f4f0e22 | 4307 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
230d793d RS |
4308 | && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND |
4309 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT | |
4310 | && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) | |
5f4f0e22 | 4311 | == ((HOST_WIDE_INT) 1 << (i + 1)) - 1)) |
230d793d RS |
4312 | || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND |
4313 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) | |
770ae6cc | 4314 | == (unsigned int) i + 1)))) |
8079805d RK |
4315 | return simplify_shift_const |
4316 | (NULL_RTX, ASHIFTRT, mode, | |
4317 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
4318 | XEXP (XEXP (XEXP (x, 0), 0), 0), | |
4319 | GET_MODE_BITSIZE (mode) - (i + 1)), | |
4320 | GET_MODE_BITSIZE (mode) - (i + 1)); | |
230d793d | 4321 | |
bc0776c6 RK |
4322 | /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if |
4323 | C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE | |
4324 | is 1. This produces better code than the alternative immediately | |
4325 | below. */ | |
ec8e098d | 4326 | if (COMPARISON_P (XEXP (x, 0)) |
bc0776c6 | 4327 | && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx) |
9a915772 JH |
4328 | || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)) |
4329 | && (reversed = reversed_comparison (XEXP (x, 0), mode, | |
4330 | XEXP (XEXP (x, 0), 0), | |
4331 | XEXP (XEXP (x, 0), 1)))) | |
8079805d | 4332 | return |
f1c6ba8b | 4333 | simplify_gen_unary (NEG, mode, reversed, mode); |
bc0776c6 RK |
4334 | |
4335 | /* If only the low-order bit of X is possibly nonzero, (plus x -1) | |
230d793d RS |
4336 | can become (ashiftrt (ashift (xor x 1) C) C) where C is |
4337 | the bitsize of the mode - 1. This allows simplification of | |
4338 | "a = (b & 8) == 0;" */ | |
4339 | if (XEXP (x, 1) == constm1_rtx | |
f8cfc6aa | 4340 | && !REG_P (XEXP (x, 0)) |
e869aa39 | 4341 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG |
f8cfc6aa | 4342 | && REG_P (SUBREG_REG (XEXP (x, 0)))) |
951553af | 4343 | && nonzero_bits (XEXP (x, 0), mode) == 1) |
8079805d RK |
4344 | return simplify_shift_const (NULL_RTX, ASHIFTRT, mode, |
4345 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
f1c6ba8b | 4346 | gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx), |
8079805d RK |
4347 | GET_MODE_BITSIZE (mode) - 1), |
4348 | GET_MODE_BITSIZE (mode) - 1); | |
02f4ada4 RK |
4349 | |
4350 | /* If we are adding two things that have no bits in common, convert | |
4351 | the addition into an IOR. This will often be further simplified, | |
4352 | for example in cases like ((a & 1) + (a & 2)), which can | |
4353 | become a & 3. */ | |
4354 | ||
ac49a949 | 4355 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
951553af RK |
4356 | && (nonzero_bits (XEXP (x, 0), mode) |
4357 | & nonzero_bits (XEXP (x, 1), mode)) == 0) | |
085f1714 RH |
4358 | { |
4359 | /* Try to simplify the expression further. */ | |
1999435c | 4360 | rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1)); |
6621d78e | 4361 | temp = combine_simplify_rtx (tor, mode, in_dest); |
085f1714 RH |
4362 | |
4363 | /* If we could, great. If not, do not go ahead with the IOR | |
4364 | replacement, since PLUS appears in many special purpose | |
4365 | address arithmetic instructions. */ | |
4366 | if (GET_CODE (temp) != CLOBBER && temp != tor) | |
4367 | return temp; | |
4368 | } | |
230d793d RS |
4369 | break; |
4370 | ||
4371 | case MINUS: | |
0802d516 RK |
4372 | /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done |
4373 | by reversing the comparison code if valid. */ | |
4374 | if (STORE_FLAG_VALUE == 1 | |
4375 | && XEXP (x, 0) == const1_rtx | |
ec8e098d | 4376 | && COMPARISON_P (XEXP (x, 1)) |
9a915772 JH |
4377 | && (reversed = reversed_comparison (XEXP (x, 1), mode, |
4378 | XEXP (XEXP (x, 1), 0), | |
4379 | XEXP (XEXP (x, 1), 1)))) | |
4380 | return reversed; | |
5109d49f | 4381 | |
230d793d RS |
4382 | /* (minus <foo> (and <foo> (const_int -pow2))) becomes |
4383 | (and <foo> (const_int pow2-1)) */ | |
4384 | if (GET_CODE (XEXP (x, 1)) == AND | |
4385 | && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT | |
663522cb | 4386 | && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0 |
230d793d | 4387 | && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0))) |
8079805d | 4388 | return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0), |
663522cb | 4389 | -INTVAL (XEXP (XEXP (x, 1), 1)) - 1); |
7bef8680 | 4390 | |
16823694 GK |
4391 | /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A). |
4392 | */ | |
73a39fc4 | 4393 | if (GET_CODE (XEXP (x, 1)) == MULT |
16823694 GK |
4394 | && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG) |
4395 | { | |
4396 | rtx in1, in2; | |
73a39fc4 | 4397 | |
16823694 GK |
4398 | in1 = XEXP (XEXP (XEXP (x, 1), 0), 0); |
4399 | in2 = XEXP (XEXP (x, 1), 1); | |
1999435c PB |
4400 | return gen_binary (PLUS, mode, gen_binary (MULT, mode, in1, in2), |
4401 | XEXP (x, 0)); | |
16823694 GK |
4402 | } |
4403 | ||
73a39fc4 | 4404 | /* Canonicalize (minus (neg A) (mult B C)) to |
e869aa39 | 4405 | (minus (mult (neg B) C) A). */ |
73a39fc4 | 4406 | if (GET_CODE (XEXP (x, 1)) == MULT |
16823694 GK |
4407 | && GET_CODE (XEXP (x, 0)) == NEG) |
4408 | { | |
4409 | rtx in1, in2; | |
73a39fc4 | 4410 | |
16823694 GK |
4411 | in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode); |
4412 | in2 = XEXP (XEXP (x, 1), 1); | |
1999435c PB |
4413 | return gen_binary (MINUS, mode, gen_binary (MULT, mode, in1, in2), |
4414 | XEXP (XEXP (x, 0), 0)); | |
16823694 GK |
4415 | } |
4416 | ||
7bef8680 RK |
4417 | /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for |
4418 | integers. */ | |
4419 | if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)) | |
1999435c PB |
4420 | return gen_binary (MINUS, mode, |
4421 | gen_binary (MINUS, mode, XEXP (x, 0), | |
4422 | XEXP (XEXP (x, 1), 0)), | |
4423 | XEXP (XEXP (x, 1), 1)); | |
230d793d RS |
4424 | break; |
4425 | ||
4426 | case MULT: | |
4427 | /* If we have (mult (plus A B) C), apply the distributive law and then | |
4428 | the inverse distributive law to see if things simplify. This | |
4429 | occurs mostly in addresses, often when unrolling loops. */ | |
4430 | ||
4431 | if (GET_CODE (XEXP (x, 0)) == PLUS) | |
4432 | { | |
1999435c PB |
4433 | x = apply_distributive_law |
4434 | (gen_binary (PLUS, mode, | |
4435 | gen_binary (MULT, mode, | |
4436 | XEXP (XEXP (x, 0), 0), XEXP (x, 1)), | |
4437 | gen_binary (MULT, mode, | |
4438 | XEXP (XEXP (x, 0), 1), | |
4439 | copy_rtx (XEXP (x, 1))))); | |
4440 | ||
4441 | if (GET_CODE (x) != MULT) | |
4442 | return x; | |
230d793d | 4443 | } |
4ba5f925 JH |
4444 | /* Try simplify a*(b/c) as (a*b)/c. */ |
4445 | if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations | |
4446 | && GET_CODE (XEXP (x, 0)) == DIV) | |
4447 | { | |
4448 | rtx tem = simplify_binary_operation (MULT, mode, | |
4449 | XEXP (XEXP (x, 0), 0), | |
4450 | XEXP (x, 1)); | |
4451 | if (tem) | |
1999435c | 4452 | return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1)); |
4ba5f925 | 4453 | } |
230d793d RS |
4454 | break; |
4455 | ||
4456 | case UDIV: | |
4457 | /* If this is a divide by a power of two, treat it as a shift if | |
4458 | its first operand is a shift. */ | |
4459 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
4460 | && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0 | |
4461 | && (GET_CODE (XEXP (x, 0)) == ASHIFT | |
4462 | || GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
4463 | || GET_CODE (XEXP (x, 0)) == ASHIFTRT | |
4464 | || GET_CODE (XEXP (x, 0)) == ROTATE | |
4465 | || GET_CODE (XEXP (x, 0)) == ROTATERT)) | |
8079805d | 4466 | return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i); |
230d793d RS |
4467 | break; |
4468 | ||
4469 | case EQ: case NE: | |
4470 | case GT: case GTU: case GE: case GEU: | |
4471 | case LT: case LTU: case LE: case LEU: | |
69bc0a1f | 4472 | case UNEQ: case LTGT: |
23190837 AJ |
4473 | case UNGT: case UNGE: |
4474 | case UNLT: case UNLE: | |
69bc0a1f | 4475 | case UNORDERED: case ORDERED: |
230d793d RS |
4476 | /* If the first operand is a condition code, we can't do anything |
4477 | with it. */ | |
4478 | if (GET_CODE (XEXP (x, 0)) == COMPARE | |
4479 | || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC | |
8beccec8 | 4480 | && ! CC0_P (XEXP (x, 0)))) |
230d793d RS |
4481 | { |
4482 | rtx op0 = XEXP (x, 0); | |
4483 | rtx op1 = XEXP (x, 1); | |
4484 | enum rtx_code new_code; | |
4485 | ||
4486 | if (GET_CODE (op0) == COMPARE) | |
4487 | op1 = XEXP (op0, 1), op0 = XEXP (op0, 0); | |
4488 | ||
4489 | /* Simplify our comparison, if possible. */ | |
4490 | new_code = simplify_comparison (code, &op0, &op1); | |
4491 | ||
230d793d | 4492 | /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X |
951553af | 4493 | if only the low-order bit is possibly nonzero in X (such as when |
5109d49f RK |
4494 | X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to |
4495 | (xor X 1) or (minus 1 X); we use the former. Finally, if X is | |
4496 | known to be either 0 or -1, NE becomes a NEG and EQ becomes | |
4497 | (plus X 1). | |
4498 | ||
4499 | Remove any ZERO_EXTRACT we made when thinking this was a | |
4500 | comparison. It may now be simpler to use, e.g., an AND. If a | |
4501 | ZERO_EXTRACT is indeed appropriate, it will be placed back by | |
4502 | the call to make_compound_operation in the SET case. */ | |
4503 | ||
0802d516 RK |
4504 | if (STORE_FLAG_VALUE == 1 |
4505 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
a191f0ee RH |
4506 | && op1 == const0_rtx |
4507 | && mode == GET_MODE (op0) | |
4508 | && nonzero_bits (op0, mode) == 1) | |
4de249d9 PB |
4509 | return gen_lowpart (mode, |
4510 | expand_compound_operation (op0)); | |
5109d49f | 4511 | |
0802d516 RK |
4512 | else if (STORE_FLAG_VALUE == 1 |
4513 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4514 | && op1 == const0_rtx |
a191f0ee | 4515 | && mode == GET_MODE (op0) |
5109d49f RK |
4516 | && (num_sign_bit_copies (op0, mode) |
4517 | == GET_MODE_BITSIZE (mode))) | |
4518 | { | |
4519 | op0 = expand_compound_operation (op0); | |
f1c6ba8b | 4520 | return simplify_gen_unary (NEG, mode, |
4de249d9 | 4521 | gen_lowpart (mode, op0), |
f1c6ba8b | 4522 | mode); |
5109d49f RK |
4523 | } |
4524 | ||
0802d516 RK |
4525 | else if (STORE_FLAG_VALUE == 1 |
4526 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
230d793d | 4527 | && op1 == const0_rtx |
a191f0ee | 4528 | && mode == GET_MODE (op0) |
5109d49f | 4529 | && nonzero_bits (op0, mode) == 1) |
818b11b9 RK |
4530 | { |
4531 | op0 = expand_compound_operation (op0); | |
1999435c PB |
4532 | return gen_binary (XOR, mode, |
4533 | gen_lowpart (mode, op0), | |
4534 | const1_rtx); | |
5109d49f | 4535 | } |
818b11b9 | 4536 | |
0802d516 RK |
4537 | else if (STORE_FLAG_VALUE == 1 |
4538 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4539 | && op1 == const0_rtx |
a191f0ee | 4540 | && mode == GET_MODE (op0) |
5109d49f RK |
4541 | && (num_sign_bit_copies (op0, mode) |
4542 | == GET_MODE_BITSIZE (mode))) | |
4543 | { | |
4544 | op0 = expand_compound_operation (op0); | |
4de249d9 | 4545 | return plus_constant (gen_lowpart (mode, op0), 1); |
818b11b9 | 4546 | } |
230d793d | 4547 | |
5109d49f RK |
4548 | /* If STORE_FLAG_VALUE is -1, we have cases similar to |
4549 | those above. */ | |
0802d516 RK |
4550 | if (STORE_FLAG_VALUE == -1 |
4551 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
230d793d | 4552 | && op1 == const0_rtx |
5109d49f RK |
4553 | && (num_sign_bit_copies (op0, mode) |
4554 | == GET_MODE_BITSIZE (mode))) | |
4de249d9 PB |
4555 | return gen_lowpart (mode, |
4556 | expand_compound_operation (op0)); | |
5109d49f | 4557 | |
0802d516 RK |
4558 | else if (STORE_FLAG_VALUE == -1 |
4559 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4560 | && op1 == const0_rtx |
a191f0ee | 4561 | && mode == GET_MODE (op0) |
5109d49f RK |
4562 | && nonzero_bits (op0, mode) == 1) |
4563 | { | |
4564 | op0 = expand_compound_operation (op0); | |
f1c6ba8b | 4565 | return simplify_gen_unary (NEG, mode, |
4de249d9 | 4566 | gen_lowpart (mode, op0), |
f1c6ba8b | 4567 | mode); |
5109d49f RK |
4568 | } |
4569 | ||
0802d516 RK |
4570 | else if (STORE_FLAG_VALUE == -1 |
4571 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4572 | && op1 == const0_rtx |
a191f0ee | 4573 | && mode == GET_MODE (op0) |
5109d49f RK |
4574 | && (num_sign_bit_copies (op0, mode) |
4575 | == GET_MODE_BITSIZE (mode))) | |
230d793d | 4576 | { |
818b11b9 | 4577 | op0 = expand_compound_operation (op0); |
f1c6ba8b | 4578 | return simplify_gen_unary (NOT, mode, |
4de249d9 | 4579 | gen_lowpart (mode, op0), |
f1c6ba8b | 4580 | mode); |
5109d49f RK |
4581 | } |
4582 | ||
4583 | /* If X is 0/1, (eq X 0) is X-1. */ | |
0802d516 RK |
4584 | else if (STORE_FLAG_VALUE == -1 |
4585 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4586 | && op1 == const0_rtx |
a191f0ee | 4587 | && mode == GET_MODE (op0) |
5109d49f RK |
4588 | && nonzero_bits (op0, mode) == 1) |
4589 | { | |
4590 | op0 = expand_compound_operation (op0); | |
4de249d9 | 4591 | return plus_constant (gen_lowpart (mode, op0), -1); |
230d793d | 4592 | } |
230d793d RS |
4593 | |
4594 | /* If STORE_FLAG_VALUE says to just test the sign bit and X has just | |
951553af RK |
4595 | one bit that might be nonzero, we can convert (ne x 0) to |
4596 | (ashift x c) where C puts the bit in the sign bit. Remove any | |
4597 | AND with STORE_FLAG_VALUE when we are done, since we are only | |
4598 | going to test the sign bit. */ | |
3f508eca | 4599 | if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT |
5f4f0e22 | 4600 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
0802d516 | 4601 | && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode)) |
e869aa39 | 4602 | == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1)) |
230d793d RS |
4603 | && op1 == const0_rtx |
4604 | && mode == GET_MODE (op0) | |
5109d49f | 4605 | && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0) |
230d793d | 4606 | { |
818b11b9 RK |
4607 | x = simplify_shift_const (NULL_RTX, ASHIFT, mode, |
4608 | expand_compound_operation (op0), | |
230d793d RS |
4609 | GET_MODE_BITSIZE (mode) - 1 - i); |
4610 | if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx) | |
4611 | return XEXP (x, 0); | |
4612 | else | |
4613 | return x; | |
4614 | } | |
4615 | ||
4616 | /* If the code changed, return a whole new comparison. */ | |
4617 | if (new_code != code) | |
f1c6ba8b | 4618 | return gen_rtx_fmt_ee (new_code, mode, op0, op1); |
230d793d | 4619 | |
663522cb | 4620 | /* Otherwise, keep this operation, but maybe change its operands. |
230d793d RS |
4621 | This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */ |
4622 | SUBST (XEXP (x, 0), op0); | |
4623 | SUBST (XEXP (x, 1), op1); | |
4624 | } | |
4625 | break; | |
663522cb | 4626 | |
230d793d | 4627 | case IF_THEN_ELSE: |
8079805d | 4628 | return simplify_if_then_else (x); |
9210df58 | 4629 | |
8079805d RK |
4630 | case ZERO_EXTRACT: |
4631 | case SIGN_EXTRACT: | |
4632 | case ZERO_EXTEND: | |
4633 | case SIGN_EXTEND: | |
0f41302f | 4634 | /* If we are processing SET_DEST, we are done. */ |
8079805d RK |
4635 | if (in_dest) |
4636 | return x; | |
d0ab8cd3 | 4637 | |
8079805d | 4638 | return expand_compound_operation (x); |
d0ab8cd3 | 4639 | |
8079805d RK |
4640 | case SET: |
4641 | return simplify_set (x); | |
1a26b032 | 4642 | |
8079805d RK |
4643 | case AND: |
4644 | case IOR: | |
4645 | case XOR: | |
6621d78e | 4646 | return simplify_logical (x); |
d0ab8cd3 | 4647 | |
663522cb | 4648 | case ABS: |
8079805d RK |
4649 | /* (abs (neg <foo>)) -> (abs <foo>) */ |
4650 | if (GET_CODE (XEXP (x, 0)) == NEG) | |
4651 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
1a26b032 | 4652 | |
b472527b JL |
4653 | /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS), |
4654 | do nothing. */ | |
4655 | if (GET_MODE (XEXP (x, 0)) == VOIDmode) | |
4656 | break; | |
f40421ce | 4657 | |
8079805d RK |
4658 | /* If operand is something known to be positive, ignore the ABS. */ |
4659 | if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS | |
4660 | || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
4661 | <= HOST_BITS_PER_WIDE_INT) | |
4662 | && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
4663 | & ((HOST_WIDE_INT) 1 | |
4664 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))) | |
4665 | == 0))) | |
4666 | return XEXP (x, 0); | |
1a26b032 | 4667 | |
8079805d RK |
4668 | /* If operand is known to be only -1 or 0, convert ABS to NEG. */ |
4669 | if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode)) | |
f1c6ba8b | 4670 | return gen_rtx_NEG (mode, XEXP (x, 0)); |
1a26b032 | 4671 | |
8079805d | 4672 | break; |
1a26b032 | 4673 | |
8079805d RK |
4674 | case FFS: |
4675 | /* (ffs (*_extend <X>)) = (ffs <X>) */ | |
4676 | if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND | |
4677 | || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND) | |
4678 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4679 | break; | |
1a26b032 | 4680 | |
2928cd7a RH |
4681 | case POPCOUNT: |
4682 | case PARITY: | |
4683 | /* (pop* (zero_extend <X>)) = (pop* <X>) */ | |
4684 | if (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND) | |
4685 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4686 | break; | |
4687 | ||
8079805d RK |
4688 | case FLOAT: |
4689 | /* (float (sign_extend <X>)) = (float <X>). */ | |
4690 | if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND) | |
4691 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4692 | break; | |
1a26b032 | 4693 | |
8079805d RK |
4694 | case ASHIFT: |
4695 | case LSHIFTRT: | |
4696 | case ASHIFTRT: | |
4697 | case ROTATE: | |
4698 | case ROTATERT: | |
4699 | /* If this is a shift by a constant amount, simplify it. */ | |
4700 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) | |
663522cb | 4701 | return simplify_shift_const (x, code, mode, XEXP (x, 0), |
8079805d RK |
4702 | INTVAL (XEXP (x, 1))); |
4703 | ||
f8cfc6aa | 4704 | else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1))) |
8079805d | 4705 | SUBST (XEXP (x, 1), |
f1b1186f | 4706 | force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)), |
663522cb | 4707 | ((HOST_WIDE_INT) 1 |
8079805d RK |
4708 | << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x)))) |
4709 | - 1, | |
4710 | NULL_RTX, 0)); | |
8079805d | 4711 | break; |
e9a25f70 | 4712 | |
82be40f7 BS |
4713 | case VEC_SELECT: |
4714 | { | |
4715 | rtx op0 = XEXP (x, 0); | |
4716 | rtx op1 = XEXP (x, 1); | |
4717 | int len; | |
4718 | ||
341c100f | 4719 | gcc_assert (GET_CODE (op1) == PARALLEL); |
82be40f7 BS |
4720 | len = XVECLEN (op1, 0); |
4721 | if (len == 1 | |
4722 | && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT | |
4723 | && GET_CODE (op0) == VEC_CONCAT) | |
4724 | { | |
4725 | int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x)); | |
4726 | ||
4727 | /* Try to find the element in the VEC_CONCAT. */ | |
4728 | for (;;) | |
4729 | { | |
4730 | if (GET_MODE (op0) == GET_MODE (x)) | |
4731 | return op0; | |
4732 | if (GET_CODE (op0) == VEC_CONCAT) | |
4733 | { | |
4734 | HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0))); | |
4735 | if (op0_size < offset) | |
4736 | op0 = XEXP (op0, 0); | |
4737 | else | |
4738 | { | |
4739 | offset -= op0_size; | |
4740 | op0 = XEXP (op0, 1); | |
4741 | } | |
4742 | } | |
4743 | else | |
4744 | break; | |
4745 | } | |
4746 | } | |
4747 | } | |
4748 | ||
4749 | break; | |
23190837 | 4750 | |
e9a25f70 JL |
4751 | default: |
4752 | break; | |
8079805d RK |
4753 | } |
4754 | ||
4755 | return x; | |
4756 | } | |
4757 | \f | |
4758 | /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */ | |
5109d49f | 4759 | |
8079805d | 4760 | static rtx |
79a490a9 | 4761 | simplify_if_then_else (rtx x) |
8079805d RK |
4762 | { |
4763 | enum machine_mode mode = GET_MODE (x); | |
4764 | rtx cond = XEXP (x, 0); | |
d6edb99e ZW |
4765 | rtx true_rtx = XEXP (x, 1); |
4766 | rtx false_rtx = XEXP (x, 2); | |
8079805d | 4767 | enum rtx_code true_code = GET_CODE (cond); |
ec8e098d | 4768 | int comparison_p = COMPARISON_P (cond); |
8079805d RK |
4769 | rtx temp; |
4770 | int i; | |
9a915772 JH |
4771 | enum rtx_code false_code; |
4772 | rtx reversed; | |
8079805d | 4773 | |
0f41302f | 4774 | /* Simplify storing of the truth value. */ |
d6edb99e | 4775 | if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx) |
1999435c | 4776 | return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1)); |
663522cb | 4777 | |
0f41302f | 4778 | /* Also when the truth value has to be reversed. */ |
9a915772 | 4779 | if (comparison_p |
d6edb99e | 4780 | && true_rtx == const0_rtx && false_rtx == const_true_rtx |
9a915772 JH |
4781 | && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0), |
4782 | XEXP (cond, 1)))) | |
4783 | return reversed; | |
8079805d RK |
4784 | |
4785 | /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used | |
4786 | in it is being compared against certain values. Get the true and false | |
4787 | comparisons and see if that says anything about the value of each arm. */ | |
4788 | ||
9a915772 JH |
4789 | if (comparison_p |
4790 | && ((false_code = combine_reversed_comparison_code (cond)) | |
4791 | != UNKNOWN) | |
f8cfc6aa | 4792 | && REG_P (XEXP (cond, 0))) |
8079805d RK |
4793 | { |
4794 | HOST_WIDE_INT nzb; | |
4795 | rtx from = XEXP (cond, 0); | |
8079805d RK |
4796 | rtx true_val = XEXP (cond, 1); |
4797 | rtx false_val = true_val; | |
4798 | int swapped = 0; | |
9210df58 | 4799 | |
8079805d | 4800 | /* If FALSE_CODE is EQ, swap the codes and arms. */ |
5109d49f | 4801 | |
8079805d | 4802 | if (false_code == EQ) |
1a26b032 | 4803 | { |
8079805d | 4804 | swapped = 1, true_code = EQ, false_code = NE; |
d6edb99e | 4805 | temp = true_rtx, true_rtx = false_rtx, false_rtx = temp; |
8079805d | 4806 | } |
5109d49f | 4807 | |
8079805d RK |
4808 | /* If we are comparing against zero and the expression being tested has |
4809 | only a single bit that might be nonzero, that is its value when it is | |
4810 | not equal to zero. Similarly if it is known to be -1 or 0. */ | |
4811 | ||
4812 | if (true_code == EQ && true_val == const0_rtx | |
4813 | && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0) | |
4814 | false_code = EQ, false_val = GEN_INT (nzb); | |
4815 | else if (true_code == EQ && true_val == const0_rtx | |
4816 | && (num_sign_bit_copies (from, GET_MODE (from)) | |
4817 | == GET_MODE_BITSIZE (GET_MODE (from)))) | |
4818 | false_code = EQ, false_val = constm1_rtx; | |
4819 | ||
4820 | /* Now simplify an arm if we know the value of the register in the | |
4821 | branch and it is used in the arm. Be careful due to the potential | |
4822 | of locally-shared RTL. */ | |
4823 | ||
d6edb99e ZW |
4824 | if (reg_mentioned_p (from, true_rtx)) |
4825 | true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code, | |
4826 | from, true_val), | |
8079805d | 4827 | pc_rtx, pc_rtx, 0, 0); |
d6edb99e ZW |
4828 | if (reg_mentioned_p (from, false_rtx)) |
4829 | false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code, | |
8079805d RK |
4830 | from, false_val), |
4831 | pc_rtx, pc_rtx, 0, 0); | |
4832 | ||
d6edb99e ZW |
4833 | SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx); |
4834 | SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx); | |
8079805d | 4835 | |
d6edb99e ZW |
4836 | true_rtx = XEXP (x, 1); |
4837 | false_rtx = XEXP (x, 2); | |
4838 | true_code = GET_CODE (cond); | |
8079805d | 4839 | } |
5109d49f | 4840 | |
8079805d RK |
4841 | /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be |
4842 | reversed, do so to avoid needing two sets of patterns for | |
4843 | subtract-and-branch insns. Similarly if we have a constant in the true | |
4844 | arm, the false arm is the same as the first operand of the comparison, or | |
4845 | the false arm is more complicated than the true arm. */ | |
4846 | ||
9a915772 JH |
4847 | if (comparison_p |
4848 | && combine_reversed_comparison_code (cond) != UNKNOWN | |
d6edb99e ZW |
4849 | && (true_rtx == pc_rtx |
4850 | || (CONSTANT_P (true_rtx) | |
4851 | && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx) | |
4852 | || true_rtx == const0_rtx | |
ec8e098d PB |
4853 | || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx)) |
4854 | || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx)) | |
4855 | && !OBJECT_P (false_rtx)) | |
d6edb99e ZW |
4856 | || reg_mentioned_p (true_rtx, false_rtx) |
4857 | || rtx_equal_p (false_rtx, XEXP (cond, 0)))) | |
8079805d | 4858 | { |
9a915772 | 4859 | true_code = reversed_comparison_code (cond, NULL); |
8079805d | 4860 | SUBST (XEXP (x, 0), |
9a915772 JH |
4861 | reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0), |
4862 | XEXP (cond, 1))); | |
5109d49f | 4863 | |
d6edb99e ZW |
4864 | SUBST (XEXP (x, 1), false_rtx); |
4865 | SUBST (XEXP (x, 2), true_rtx); | |
1a26b032 | 4866 | |
d6edb99e ZW |
4867 | temp = true_rtx, true_rtx = false_rtx, false_rtx = temp; |
4868 | cond = XEXP (x, 0); | |
bb821298 | 4869 | |
0f41302f | 4870 | /* It is possible that the conditional has been simplified out. */ |
bb821298 | 4871 | true_code = GET_CODE (cond); |
ec8e098d | 4872 | comparison_p = COMPARISON_P (cond); |
8079805d | 4873 | } |
abe6e52f | 4874 | |
8079805d | 4875 | /* If the two arms are identical, we don't need the comparison. */ |
1a26b032 | 4876 | |
d6edb99e ZW |
4877 | if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond)) |
4878 | return true_rtx; | |
1a26b032 | 4879 | |
5be669c7 RK |
4880 | /* Convert a == b ? b : a to "a". */ |
4881 | if (true_code == EQ && ! side_effects_p (cond) | |
73e42cf3 | 4882 | && !HONOR_NANS (mode) |
d6edb99e ZW |
4883 | && rtx_equal_p (XEXP (cond, 0), false_rtx) |
4884 | && rtx_equal_p (XEXP (cond, 1), true_rtx)) | |
4885 | return false_rtx; | |
5be669c7 | 4886 | else if (true_code == NE && ! side_effects_p (cond) |
73e42cf3 | 4887 | && !HONOR_NANS (mode) |
d6edb99e ZW |
4888 | && rtx_equal_p (XEXP (cond, 0), true_rtx) |
4889 | && rtx_equal_p (XEXP (cond, 1), false_rtx)) | |
4890 | return true_rtx; | |
5be669c7 | 4891 | |
8079805d RK |
4892 | /* Look for cases where we have (abs x) or (neg (abs X)). */ |
4893 | ||
4894 | if (GET_MODE_CLASS (mode) == MODE_INT | |
d6edb99e ZW |
4895 | && GET_CODE (false_rtx) == NEG |
4896 | && rtx_equal_p (true_rtx, XEXP (false_rtx, 0)) | |
8079805d | 4897 | && comparison_p |
d6edb99e ZW |
4898 | && rtx_equal_p (true_rtx, XEXP (cond, 0)) |
4899 | && ! side_effects_p (true_rtx)) | |
8079805d RK |
4900 | switch (true_code) |
4901 | { | |
4902 | case GT: | |
4903 | case GE: | |
f1c6ba8b | 4904 | return simplify_gen_unary (ABS, mode, true_rtx, mode); |
8079805d RK |
4905 | case LT: |
4906 | case LE: | |
f1c6ba8b RK |
4907 | return |
4908 | simplify_gen_unary (NEG, mode, | |
4909 | simplify_gen_unary (ABS, mode, true_rtx, mode), | |
4910 | mode); | |
cf0d9408 KH |
4911 | default: |
4912 | break; | |
8079805d RK |
4913 | } |
4914 | ||
4915 | /* Look for MIN or MAX. */ | |
4916 | ||
de6c5979 | 4917 | if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations) |
8079805d | 4918 | && comparison_p |
d6edb99e ZW |
4919 | && rtx_equal_p (XEXP (cond, 0), true_rtx) |
4920 | && rtx_equal_p (XEXP (cond, 1), false_rtx) | |
8079805d RK |
4921 | && ! side_effects_p (cond)) |
4922 | switch (true_code) | |
4923 | { | |
4924 | case GE: | |
4925 | case GT: | |
1999435c | 4926 | return gen_binary (SMAX, mode, true_rtx, false_rtx); |
8079805d RK |
4927 | case LE: |
4928 | case LT: | |
1999435c | 4929 | return gen_binary (SMIN, mode, true_rtx, false_rtx); |
8079805d RK |
4930 | case GEU: |
4931 | case GTU: | |
1999435c | 4932 | return gen_binary (UMAX, mode, true_rtx, false_rtx); |
8079805d RK |
4933 | case LEU: |
4934 | case LTU: | |
1999435c | 4935 | return gen_binary (UMIN, mode, true_rtx, false_rtx); |
e9a25f70 JL |
4936 | default: |
4937 | break; | |
8079805d | 4938 | } |
663522cb | 4939 | |
8079805d RK |
4940 | /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its |
4941 | second operand is zero, this can be done as (OP Z (mult COND C2)) where | |
4942 | C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or | |
4943 | SIGN_EXTEND as long as Z is already extended (so we don't destroy it). | |
4944 | We can do this kind of thing in some cases when STORE_FLAG_VALUE is | |
0802d516 | 4945 | neither 1 or -1, but it isn't worth checking for. */ |
8079805d | 4946 | |
0802d516 | 4947 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
02484af9 EB |
4948 | && comparison_p |
4949 | && GET_MODE_CLASS (mode) == MODE_INT | |
4950 | && ! side_effects_p (x)) | |
8079805d | 4951 | { |
d6edb99e ZW |
4952 | rtx t = make_compound_operation (true_rtx, SET); |
4953 | rtx f = make_compound_operation (false_rtx, SET); | |
8079805d RK |
4954 | rtx cond_op0 = XEXP (cond, 0); |
4955 | rtx cond_op1 = XEXP (cond, 1); | |
f822d252 | 4956 | enum rtx_code op = UNKNOWN, extend_op = UNKNOWN; |
8079805d | 4957 | enum machine_mode m = mode; |
6a651371 | 4958 | rtx z = 0, c1 = NULL_RTX; |
8079805d | 4959 | |
8079805d RK |
4960 | if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS |
4961 | || GET_CODE (t) == IOR || GET_CODE (t) == XOR | |
4962 | || GET_CODE (t) == ASHIFT | |
4963 | || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT) | |
4964 | && rtx_equal_p (XEXP (t, 0), f)) | |
4965 | c1 = XEXP (t, 1), op = GET_CODE (t), z = f; | |
4966 | ||
4967 | /* If an identity-zero op is commutative, check whether there | |
0f41302f | 4968 | would be a match if we swapped the operands. */ |
8079805d RK |
4969 | else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR |
4970 | || GET_CODE (t) == XOR) | |
4971 | && rtx_equal_p (XEXP (t, 1), f)) | |
4972 | c1 = XEXP (t, 0), op = GET_CODE (t), z = f; | |
4973 | else if (GET_CODE (t) == SIGN_EXTEND | |
4974 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4975 | || GET_CODE (XEXP (t, 0)) == MINUS | |
4976 | || GET_CODE (XEXP (t, 0)) == IOR | |
4977 | || GET_CODE (XEXP (t, 0)) == XOR | |
4978 | || GET_CODE (XEXP (t, 0)) == ASHIFT | |
4979 | || GET_CODE (XEXP (t, 0)) == LSHIFTRT | |
4980 | || GET_CODE (XEXP (t, 0)) == ASHIFTRT) | |
4981 | && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG | |
4982 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 0)) | |
4983 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) | |
4984 | && (num_sign_bit_copies (f, GET_MODE (f)) | |
26c34780 RS |
4985 | > (unsigned int) |
4986 | (GET_MODE_BITSIZE (mode) | |
8079805d RK |
4987 | - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0)))))) |
4988 | { | |
4989 | c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); | |
4990 | extend_op = SIGN_EXTEND; | |
4991 | m = GET_MODE (XEXP (t, 0)); | |
1a26b032 | 4992 | } |
8079805d RK |
4993 | else if (GET_CODE (t) == SIGN_EXTEND |
4994 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4995 | || GET_CODE (XEXP (t, 0)) == IOR | |
4996 | || GET_CODE (XEXP (t, 0)) == XOR) | |
4997 | && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG | |
4998 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 1)) | |
4999 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) | |
5000 | && (num_sign_bit_copies (f, GET_MODE (f)) | |
26c34780 RS |
5001 | > (unsigned int) |
5002 | (GET_MODE_BITSIZE (mode) | |
8079805d RK |
5003 | - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1)))))) |
5004 | { | |
5005 | c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); | |
5006 | extend_op = SIGN_EXTEND; | |
5007 | m = GET_MODE (XEXP (t, 0)); | |
5008 | } | |
5009 | else if (GET_CODE (t) == ZERO_EXTEND | |
5010 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
5011 | || GET_CODE (XEXP (t, 0)) == MINUS | |
5012 | || GET_CODE (XEXP (t, 0)) == IOR | |
5013 | || GET_CODE (XEXP (t, 0)) == XOR | |
5014 | || GET_CODE (XEXP (t, 0)) == ASHIFT | |
5015 | || GET_CODE (XEXP (t, 0)) == LSHIFTRT | |
5016 | || GET_CODE (XEXP (t, 0)) == ASHIFTRT) | |
5017 | && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG | |
5018 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
5019 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 0)) | |
5020 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) | |
5021 | && ((nonzero_bits (f, GET_MODE (f)) | |
663522cb | 5022 | & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0)))) |
8079805d RK |
5023 | == 0)) |
5024 | { | |
5025 | c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); | |
5026 | extend_op = ZERO_EXTEND; | |
5027 | m = GET_MODE (XEXP (t, 0)); | |
5028 | } | |
5029 | else if (GET_CODE (t) == ZERO_EXTEND | |
5030 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
5031 | || GET_CODE (XEXP (t, 0)) == IOR | |
5032 | || GET_CODE (XEXP (t, 0)) == XOR) | |
5033 | && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG | |
5034 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
5035 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 1)) | |
5036 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) | |
5037 | && ((nonzero_bits (f, GET_MODE (f)) | |
663522cb | 5038 | & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1)))) |
8079805d RK |
5039 | == 0)) |
5040 | { | |
5041 | c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); | |
5042 | extend_op = ZERO_EXTEND; | |
5043 | m = GET_MODE (XEXP (t, 0)); | |
5044 | } | |
663522cb | 5045 | |
8079805d RK |
5046 | if (z) |
5047 | { | |
1999435c | 5048 | temp = subst (gen_binary (true_code, m, cond_op0, cond_op1), |
8079805d | 5049 | pc_rtx, pc_rtx, 0, 0); |
1999435c PB |
5050 | temp = gen_binary (MULT, m, temp, |
5051 | gen_binary (MULT, m, c1, const_true_rtx)); | |
8079805d | 5052 | temp = subst (temp, pc_rtx, pc_rtx, 0, 0); |
1999435c | 5053 | temp = gen_binary (op, m, gen_lowpart (m, z), temp); |
8079805d | 5054 | |
f822d252 | 5055 | if (extend_op != UNKNOWN) |
f1c6ba8b | 5056 | temp = simplify_gen_unary (extend_op, mode, temp, m); |
8079805d RK |
5057 | |
5058 | return temp; | |
5059 | } | |
5060 | } | |
224eeff2 | 5061 | |
8079805d RK |
5062 | /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or |
5063 | 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the | |
5064 | negation of a single bit, we can convert this operation to a shift. We | |
5065 | can actually do this more generally, but it doesn't seem worth it. */ | |
5066 | ||
5067 | if (true_code == NE && XEXP (cond, 1) == const0_rtx | |
d6edb99e | 5068 | && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT |
8079805d | 5069 | && ((1 == nonzero_bits (XEXP (cond, 0), mode) |
d6edb99e | 5070 | && (i = exact_log2 (INTVAL (true_rtx))) >= 0) |
8079805d RK |
5071 | || ((num_sign_bit_copies (XEXP (cond, 0), mode) |
5072 | == GET_MODE_BITSIZE (mode)) | |
d6edb99e | 5073 | && (i = exact_log2 (-INTVAL (true_rtx))) >= 0))) |
8079805d RK |
5074 | return |
5075 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
4de249d9 | 5076 | gen_lowpart (mode, XEXP (cond, 0)), i); |
230d793d | 5077 | |
83588a9d JH |
5078 | /* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8. */ |
5079 | if (true_code == NE && XEXP (cond, 1) == const0_rtx | |
5080 | && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT | |
db33236e | 5081 | && GET_MODE (XEXP (cond, 0)) == mode |
83588a9d JH |
5082 | && (INTVAL (true_rtx) & GET_MODE_MASK (mode)) |
5083 | == nonzero_bits (XEXP (cond, 0), mode) | |
5084 | && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0) | |
5085 | return XEXP (cond, 0); | |
5086 | ||
8079805d RK |
5087 | return x; |
5088 | } | |
5089 | \f | |
5090 | /* Simplify X, a SET expression. Return the new expression. */ | |
230d793d | 5091 | |
8079805d | 5092 | static rtx |
79a490a9 | 5093 | simplify_set (rtx x) |
8079805d RK |
5094 | { |
5095 | rtx src = SET_SRC (x); | |
5096 | rtx dest = SET_DEST (x); | |
5097 | enum machine_mode mode | |
5098 | = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest); | |
5099 | rtx other_insn; | |
5100 | rtx *cc_use; | |
5101 | ||
5102 | /* (set (pc) (return)) gets written as (return). */ | |
5103 | if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN) | |
5104 | return src; | |
230d793d | 5105 | |
87e3e0c1 RK |
5106 | /* Now that we know for sure which bits of SRC we are using, see if we can |
5107 | simplify the expression for the object knowing that we only need the | |
5108 | low-order bits. */ | |
5109 | ||
855c3a2e IS |
5110 | if (GET_MODE_CLASS (mode) == MODE_INT |
5111 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
c5c76735 | 5112 | { |
e8dc6d50 | 5113 | src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0); |
c5c76735 JL |
5114 | SUBST (SET_SRC (x), src); |
5115 | } | |
87e3e0c1 | 5116 | |
8079805d RK |
5117 | /* If we are setting CC0 or if the source is a COMPARE, look for the use of |
5118 | the comparison result and try to simplify it unless we already have used | |
5119 | undobuf.other_insn. */ | |
dbf4f1a2 RS |
5120 | if ((GET_MODE_CLASS (mode) == MODE_CC |
5121 | || GET_CODE (src) == COMPARE | |
5122 | || CC0_P (dest)) | |
8079805d RK |
5123 | && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0 |
5124 | && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn) | |
ec8e098d | 5125 | && COMPARISON_P (*cc_use) |
c0d3ac4d | 5126 | && rtx_equal_p (XEXP (*cc_use, 0), dest)) |
8079805d RK |
5127 | { |
5128 | enum rtx_code old_code = GET_CODE (*cc_use); | |
5129 | enum rtx_code new_code; | |
f40f4c8e | 5130 | rtx op0, op1, tmp; |
8079805d RK |
5131 | int other_changed = 0; |
5132 | enum machine_mode compare_mode = GET_MODE (dest); | |
5133 | ||
5134 | if (GET_CODE (src) == COMPARE) | |
5135 | op0 = XEXP (src, 0), op1 = XEXP (src, 1); | |
5136 | else | |
8abcb0f7 | 5137 | op0 = src, op1 = CONST0_RTX (GET_MODE (src)); |
230d793d | 5138 | |
c6fb08ad PB |
5139 | tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode, |
5140 | op0, op1); | |
5141 | if (!tmp) | |
5142 | new_code = old_code; | |
5143 | else if (!CONSTANT_P (tmp)) | |
5144 | { | |
5145 | new_code = GET_CODE (tmp); | |
5146 | op0 = XEXP (tmp, 0); | |
5147 | op1 = XEXP (tmp, 1); | |
5148 | } | |
f40f4c8e | 5149 | else |
f40f4c8e RS |
5150 | { |
5151 | rtx pat = PATTERN (other_insn); | |
5152 | undobuf.other_insn = other_insn; | |
5153 | SUBST (*cc_use, tmp); | |
5154 | ||
5155 | /* Attempt to simplify CC user. */ | |
5156 | if (GET_CODE (pat) == SET) | |
5157 | { | |
5158 | rtx new = simplify_rtx (SET_SRC (pat)); | |
5159 | if (new != NULL_RTX) | |
5160 | SUBST (SET_SRC (pat), new); | |
5161 | } | |
5162 | ||
5163 | /* Convert X into a no-op move. */ | |
5164 | SUBST (SET_DEST (x), pc_rtx); | |
5165 | SUBST (SET_SRC (x), pc_rtx); | |
5166 | return x; | |
5167 | } | |
5168 | ||
8079805d | 5169 | /* Simplify our comparison, if possible. */ |
c6fb08ad | 5170 | new_code = simplify_comparison (new_code, &op0, &op1); |
230d793d | 5171 | |
94134f42 | 5172 | #ifdef SELECT_CC_MODE |
8079805d RK |
5173 | /* If this machine has CC modes other than CCmode, check to see if we |
5174 | need to use a different CC mode here. */ | |
c6fb08ad PB |
5175 | if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC) |
5176 | compare_mode = GET_MODE (op0); | |
5177 | else | |
5178 | compare_mode = SELECT_CC_MODE (new_code, op0, op1); | |
230d793d | 5179 | |
94134f42 | 5180 | #ifndef HAVE_cc0 |
8079805d RK |
5181 | /* If the mode changed, we have to change SET_DEST, the mode in the |
5182 | compare, and the mode in the place SET_DEST is used. If SET_DEST is | |
5183 | a hard register, just build new versions with the proper mode. If it | |
5184 | is a pseudo, we lose unless it is only time we set the pseudo, in | |
5185 | which case we can safely change its mode. */ | |
5186 | if (compare_mode != GET_MODE (dest)) | |
5187 | { | |
770ae6cc | 5188 | unsigned int regno = REGNO (dest); |
38a448ca | 5189 | rtx new_dest = gen_rtx_REG (compare_mode, regno); |
8079805d RK |
5190 | |
5191 | if (regno < FIRST_PSEUDO_REGISTER | |
b1f21e0a | 5192 | || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest))) |
230d793d | 5193 | { |
8079805d RK |
5194 | if (regno >= FIRST_PSEUDO_REGISTER) |
5195 | SUBST (regno_reg_rtx[regno], new_dest); | |
230d793d | 5196 | |
8079805d RK |
5197 | SUBST (SET_DEST (x), new_dest); |
5198 | SUBST (XEXP (*cc_use, 0), new_dest); | |
5199 | other_changed = 1; | |
230d793d | 5200 | |
8079805d | 5201 | dest = new_dest; |
230d793d | 5202 | } |
8079805d | 5203 | } |
94134f42 ZW |
5204 | #endif /* cc0 */ |
5205 | #endif /* SELECT_CC_MODE */ | |
230d793d | 5206 | |
8079805d RK |
5207 | /* If the code changed, we have to build a new comparison in |
5208 | undobuf.other_insn. */ | |
5209 | if (new_code != old_code) | |
5210 | { | |
2051c897 | 5211 | int other_changed_previously = other_changed; |
8079805d RK |
5212 | unsigned HOST_WIDE_INT mask; |
5213 | ||
f1c6ba8b RK |
5214 | SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use), |
5215 | dest, const0_rtx)); | |
2051c897 | 5216 | other_changed = 1; |
8079805d RK |
5217 | |
5218 | /* If the only change we made was to change an EQ into an NE or | |
5219 | vice versa, OP0 has only one bit that might be nonzero, and OP1 | |
5220 | is zero, check if changing the user of the condition code will | |
5221 | produce a valid insn. If it won't, we can keep the original code | |
5222 | in that insn by surrounding our operation with an XOR. */ | |
5223 | ||
5224 | if (((old_code == NE && new_code == EQ) | |
5225 | || (old_code == EQ && new_code == NE)) | |
2051c897 | 5226 | && ! other_changed_previously && op1 == const0_rtx |
8079805d RK |
5227 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT |
5228 | && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0) | |
230d793d | 5229 | { |
8079805d | 5230 | rtx pat = PATTERN (other_insn), note = 0; |
230d793d | 5231 | |
8e2f6e35 | 5232 | if ((recog_for_combine (&pat, other_insn, ¬e) < 0 |
8079805d RK |
5233 | && ! check_asm_operands (pat))) |
5234 | { | |
5235 | PUT_CODE (*cc_use, old_code); | |
2051c897 | 5236 | other_changed = 0; |
230d793d | 5237 | |
1999435c | 5238 | op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask)); |
230d793d | 5239 | } |
230d793d | 5240 | } |
8079805d RK |
5241 | } |
5242 | ||
5243 | if (other_changed) | |
5244 | undobuf.other_insn = other_insn; | |
230d793d RS |
5245 | |
5246 | #ifdef HAVE_cc0 | |
8079805d RK |
5247 | /* If we are now comparing against zero, change our source if |
5248 | needed. If we do not use cc0, we always have a COMPARE. */ | |
5249 | if (op1 == const0_rtx && dest == cc0_rtx) | |
5250 | { | |
5251 | SUBST (SET_SRC (x), op0); | |
5252 | src = op0; | |
5253 | } | |
5254 | else | |
230d793d RS |
5255 | #endif |
5256 | ||
8079805d RK |
5257 | /* Otherwise, if we didn't previously have a COMPARE in the |
5258 | correct mode, we need one. */ | |
5259 | if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode) | |
5260 | { | |
f1c6ba8b | 5261 | SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1)); |
8079805d | 5262 | src = SET_SRC (x); |
230d793d RS |
5263 | } |
5264 | else | |
5265 | { | |
8079805d RK |
5266 | /* Otherwise, update the COMPARE if needed. */ |
5267 | SUBST (XEXP (src, 0), op0); | |
5268 | SUBST (XEXP (src, 1), op1); | |
230d793d | 5269 | } |
8079805d RK |
5270 | } |
5271 | else | |
5272 | { | |
5273 | /* Get SET_SRC in a form where we have placed back any | |
5274 | compound expressions. Then do the checks below. */ | |
5275 | src = make_compound_operation (src, SET); | |
5276 | SUBST (SET_SRC (x), src); | |
5277 | } | |
230d793d | 5278 | |
8079805d RK |
5279 | /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation, |
5280 | and X being a REG or (subreg (reg)), we may be able to convert this to | |
663522cb | 5281 | (set (subreg:m2 x) (op)). |
df62f951 | 5282 | |
5c881655 KH |
5283 | We can always do this if M1 is narrower than M2 because that means that |
5284 | we only care about the low bits of the result. | |
df62f951 | 5285 | |
5c881655 KH |
5286 | However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot |
5287 | perform a narrower operation than requested since the high-order bits will | |
5288 | be undefined. On machine where it is defined, this transformation is safe | |
5289 | as long as M1 and M2 have the same number of words. */ | |
663522cb | 5290 | |
8079805d | 5291 | if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src) |
ec8e098d | 5292 | && !OBJECT_P (SUBREG_REG (src)) |
8079805d RK |
5293 | && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1)) |
5294 | / UNITS_PER_WORD) | |
5295 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))) | |
5296 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)) | |
5c881655 KH |
5297 | #ifndef WORD_REGISTER_OPERATIONS |
5298 | && (GET_MODE_SIZE (GET_MODE (src)) | |
5299 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))) | |
5300 | #endif | |
cff9f8d5 | 5301 | #ifdef CANNOT_CHANGE_MODE_CLASS |
f8cfc6aa | 5302 | && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER |
cff9f8d5 | 5303 | && REG_CANNOT_CHANGE_MODE_P (REGNO (dest), |
73a39fc4 | 5304 | GET_MODE (SUBREG_REG (src)), |
b0c42aed | 5305 | GET_MODE (src))) |
663522cb | 5306 | #endif |
f8cfc6aa | 5307 | && (REG_P (dest) |
8079805d | 5308 | || (GET_CODE (dest) == SUBREG |
f8cfc6aa | 5309 | && REG_P (SUBREG_REG (dest))))) |
8079805d RK |
5310 | { |
5311 | SUBST (SET_DEST (x), | |
4de249d9 | 5312 | gen_lowpart (GET_MODE (SUBREG_REG (src)), |
8079805d RK |
5313 | dest)); |
5314 | SUBST (SET_SRC (x), SUBREG_REG (src)); | |
5315 | ||
5316 | src = SET_SRC (x), dest = SET_DEST (x); | |
5317 | } | |
df62f951 | 5318 | |
8c1d52a3 KH |
5319 | #ifdef HAVE_cc0 |
5320 | /* If we have (set (cc0) (subreg ...)), we try to remove the subreg | |
5321 | in SRC. */ | |
5322 | if (dest == cc0_rtx | |
5323 | && GET_CODE (src) == SUBREG | |
5324 | && subreg_lowpart_p (src) | |
5325 | && (GET_MODE_BITSIZE (GET_MODE (src)) | |
5326 | < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src))))) | |
5327 | { | |
5328 | rtx inner = SUBREG_REG (src); | |
5329 | enum machine_mode inner_mode = GET_MODE (inner); | |
5330 | ||
5331 | /* Here we make sure that we don't have a sign bit on. */ | |
5332 | if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT | |
5333 | && (nonzero_bits (inner, inner_mode) | |
5334 | < ((unsigned HOST_WIDE_INT) 1 | |
ff076520 | 5335 | << (GET_MODE_BITSIZE (GET_MODE (src)) - 1)))) |
8c1d52a3 KH |
5336 | { |
5337 | SUBST (SET_SRC (x), inner); | |
5338 | src = SET_SRC (x); | |
5339 | } | |
5340 | } | |
5341 | #endif | |
5342 | ||
8baf60bb | 5343 | #ifdef LOAD_EXTEND_OP |
8079805d RK |
5344 | /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this |
5345 | would require a paradoxical subreg. Replace the subreg with a | |
0f41302f | 5346 | zero_extend to avoid the reload that would otherwise be required. */ |
8079805d RK |
5347 | |
5348 | if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src) | |
f822d252 | 5349 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN |
ddef6bc7 | 5350 | && SUBREG_BYTE (src) == 0 |
8079805d RK |
5351 | && (GET_MODE_SIZE (GET_MODE (src)) |
5352 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))) | |
3c0cb5de | 5353 | && MEM_P (SUBREG_REG (src))) |
8079805d RK |
5354 | { |
5355 | SUBST (SET_SRC (x), | |
2fb00d7f KH |
5356 | gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))), |
5357 | GET_MODE (src), SUBREG_REG (src))); | |
8079805d RK |
5358 | |
5359 | src = SET_SRC (x); | |
5360 | } | |
230d793d RS |
5361 | #endif |
5362 | ||
8079805d RK |
5363 | /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we |
5364 | are comparing an item known to be 0 or -1 against 0, use a logical | |
5365 | operation instead. Check for one of the arms being an IOR of the other | |
5366 | arm with some value. We compute three terms to be IOR'ed together. In | |
5367 | practice, at most two will be nonzero. Then we do the IOR's. */ | |
5368 | ||
5369 | if (GET_CODE (dest) != PC | |
5370 | && GET_CODE (src) == IF_THEN_ELSE | |
36b8d792 | 5371 | && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT |
8079805d RK |
5372 | && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE) |
5373 | && XEXP (XEXP (src, 0), 1) == const0_rtx | |
6dd49058 | 5374 | && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0)) |
ea414472 DE |
5375 | #ifdef HAVE_conditional_move |
5376 | && ! can_conditionally_move_p (GET_MODE (src)) | |
5377 | #endif | |
8079805d RK |
5378 | && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), |
5379 | GET_MODE (XEXP (XEXP (src, 0), 0))) | |
5380 | == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0)))) | |
5381 | && ! side_effects_p (src)) | |
5382 | { | |
d6edb99e | 5383 | rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE |
8079805d | 5384 | ? XEXP (src, 1) : XEXP (src, 2)); |
d6edb99e | 5385 | rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE |
8079805d RK |
5386 | ? XEXP (src, 2) : XEXP (src, 1)); |
5387 | rtx term1 = const0_rtx, term2, term3; | |
5388 | ||
d6edb99e ZW |
5389 | if (GET_CODE (true_rtx) == IOR |
5390 | && rtx_equal_p (XEXP (true_rtx, 0), false_rtx)) | |
e869aa39 | 5391 | term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx; |
d6edb99e ZW |
5392 | else if (GET_CODE (true_rtx) == IOR |
5393 | && rtx_equal_p (XEXP (true_rtx, 1), false_rtx)) | |
e869aa39 | 5394 | term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx; |
d6edb99e ZW |
5395 | else if (GET_CODE (false_rtx) == IOR |
5396 | && rtx_equal_p (XEXP (false_rtx, 0), true_rtx)) | |
e869aa39 | 5397 | term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx; |
d6edb99e ZW |
5398 | else if (GET_CODE (false_rtx) == IOR |
5399 | && rtx_equal_p (XEXP (false_rtx, 1), true_rtx)) | |
e869aa39 | 5400 | term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx; |
d6edb99e | 5401 | |
1999435c PB |
5402 | term2 = gen_binary (AND, GET_MODE (src), |
5403 | XEXP (XEXP (src, 0), 0), true_rtx); | |
5404 | term3 = gen_binary (AND, GET_MODE (src), | |
5405 | simplify_gen_unary (NOT, GET_MODE (src), | |
5406 | XEXP (XEXP (src, 0), 0), | |
5407 | GET_MODE (src)), | |
5408 | false_rtx); | |
8079805d RK |
5409 | |
5410 | SUBST (SET_SRC (x), | |
1999435c PB |
5411 | gen_binary (IOR, GET_MODE (src), |
5412 | gen_binary (IOR, GET_MODE (src), term1, term2), | |
5413 | term3)); | |
8079805d RK |
5414 | |
5415 | src = SET_SRC (x); | |
5416 | } | |
230d793d | 5417 | |
246e00f2 RK |
5418 | /* If either SRC or DEST is a CLOBBER of (const_int 0), make this |
5419 | whole thing fail. */ | |
5420 | if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx) | |
5421 | return src; | |
5422 | else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx) | |
5423 | return dest; | |
5424 | else | |
5425 | /* Convert this into a field assignment operation, if possible. */ | |
5426 | return make_field_assignment (x); | |
8079805d RK |
5427 | } |
5428 | \f | |
5429 | /* Simplify, X, and AND, IOR, or XOR operation, and return the simplified | |
6621d78e | 5430 | result. */ |
8079805d RK |
5431 | |
5432 | static rtx | |
6621d78e | 5433 | simplify_logical (rtx x) |
8079805d RK |
5434 | { |
5435 | enum machine_mode mode = GET_MODE (x); | |
5436 | rtx op0 = XEXP (x, 0); | |
5437 | rtx op1 = XEXP (x, 1); | |
9a915772 | 5438 | rtx reversed; |
8079805d RK |
5439 | |
5440 | switch (GET_CODE (x)) | |
5441 | { | |
230d793d | 5442 | case AND: |
663522cb | 5443 | /* Convert (A ^ B) & A to A & (~B) since the latter is often a single |
8079805d RK |
5444 | insn (and may simplify more). */ |
5445 | if (GET_CODE (op0) == XOR | |
5446 | && rtx_equal_p (XEXP (op0, 0), op1) | |
5447 | && ! side_effects_p (op1)) | |
1999435c PB |
5448 | x = gen_binary (AND, mode, |
5449 | simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode), | |
5450 | op1); | |
8079805d RK |
5451 | |
5452 | if (GET_CODE (op0) == XOR | |
5453 | && rtx_equal_p (XEXP (op0, 1), op1) | |
5454 | && ! side_effects_p (op1)) | |
1999435c PB |
5455 | x = gen_binary (AND, mode, |
5456 | simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode), | |
5457 | op1); | |
8079805d | 5458 | |
663522cb | 5459 | /* Similarly for (~(A ^ B)) & A. */ |
8079805d RK |
5460 | if (GET_CODE (op0) == NOT |
5461 | && GET_CODE (XEXP (op0, 0)) == XOR | |
5462 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1) | |
5463 | && ! side_effects_p (op1)) | |
1999435c | 5464 | x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1); |
8079805d RK |
5465 | |
5466 | if (GET_CODE (op0) == NOT | |
5467 | && GET_CODE (XEXP (op0, 0)) == XOR | |
5468 | && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1) | |
5469 | && ! side_effects_p (op1)) | |
1999435c | 5470 | x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1); |
8079805d | 5471 | |
2e8f9abf DM |
5472 | /* We can call simplify_and_const_int only if we don't lose |
5473 | any (sign) bits when converting INTVAL (op1) to | |
5474 | "unsigned HOST_WIDE_INT". */ | |
5475 | if (GET_CODE (op1) == CONST_INT | |
5476 | && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
5477 | || INTVAL (op1) > 0)) | |
230d793d | 5478 | { |
8079805d | 5479 | x = simplify_and_const_int (x, mode, op0, INTVAL (op1)); |
230d793d RS |
5480 | |
5481 | /* If we have (ior (and (X C1) C2)) and the next restart would be | |
5482 | the last, simplify this by making C1 as small as possible | |
6621d78e PB |
5483 | and then exit. Only do this if C1 actually changes: for now |
5484 | this only saves memory but, should this transformation be | |
5485 | moved to simplify-rtx.c, we'd risk unbounded recursion there. */ | |
5486 | if (GET_CODE (x) == IOR && GET_CODE (op0) == AND | |
8079805d | 5487 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
6621d78e PB |
5488 | && GET_CODE (op1) == CONST_INT |
5489 | && (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0) | |
1999435c PB |
5490 | return gen_binary (IOR, mode, |
5491 | gen_binary (AND, mode, XEXP (op0, 0), | |
8079805d | 5492 | GEN_INT (INTVAL (XEXP (op0, 1)) |
663522cb | 5493 | & ~INTVAL (op1))), op1); |
230d793d RS |
5494 | |
5495 | if (GET_CODE (x) != AND) | |
8079805d | 5496 | return x; |
0e32506c | 5497 | |
ec8e098d PB |
5498 | op0 = XEXP (x, 0); |
5499 | op1 = XEXP (x, 1); | |
230d793d RS |
5500 | } |
5501 | ||
5502 | /* Convert (A | B) & A to A. */ | |
8079805d RK |
5503 | if (GET_CODE (op0) == IOR |
5504 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
5505 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
5506 | && ! side_effects_p (XEXP (op0, 0)) | |
5507 | && ! side_effects_p (XEXP (op0, 1))) | |
5508 | return op1; | |
230d793d | 5509 | |
1999435c PB |
5510 | /* In the following group of tests (and those in case IOR below), |
5511 | we start with some combination of logical operations and apply | |
5512 | the distributive law followed by the inverse distributive law. | |
5513 | Most of the time, this results in no change. However, if some of | |
5514 | the operands are the same or inverses of each other, simplifications | |
5515 | will result. | |
5516 | ||
5517 | For example, (and (ior A B) (not B)) can occur as the result of | |
5518 | expanding a bit field assignment. When we apply the distributive | |
5519 | law to this, we get (ior (and (A (not B))) (and (B (not B)))), | |
5520 | which then simplifies to (and (A (not B))). | |
5521 | ||
5522 | If we have (and (ior A B) C), apply the distributive law and then | |
5523 | the inverse distributive law to see if things simplify. */ | |
5524 | ||
5525 | if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR) | |
230d793d | 5526 | { |
1999435c PB |
5527 | x = apply_distributive_law |
5528 | (gen_binary (GET_CODE (op0), mode, | |
5529 | gen_binary (AND, mode, XEXP (op0, 0), op1), | |
5530 | gen_binary (AND, mode, XEXP (op0, 1), | |
5531 | copy_rtx (op1)))); | |
5532 | if (GET_CODE (x) != AND) | |
5533 | return x; | |
230d793d | 5534 | } |
1999435c PB |
5535 | |
5536 | if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR) | |
5537 | return apply_distributive_law | |
5538 | (gen_binary (GET_CODE (op1), mode, | |
5539 | gen_binary (AND, mode, XEXP (op1, 0), op0), | |
5540 | gen_binary (AND, mode, XEXP (op1, 1), | |
5541 | copy_rtx (op0)))); | |
5542 | ||
5543 | /* Similarly, taking advantage of the fact that | |
5544 | (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */ | |
5545 | ||
5546 | if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR) | |
5547 | return apply_distributive_law | |
5548 | (gen_binary (XOR, mode, | |
5549 | gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)), | |
5550 | gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)), | |
5551 | XEXP (op1, 1)))); | |
5552 | ||
5553 | else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR) | |
5554 | return apply_distributive_law | |
5555 | (gen_binary (XOR, mode, | |
5556 | gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)), | |
5557 | gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1)))); | |
230d793d RS |
5558 | break; |
5559 | ||
5560 | case IOR: | |
951553af | 5561 | /* (ior A C) is C if all bits of A that might be nonzero are on in C. */ |
8079805d | 5562 | if (GET_CODE (op1) == CONST_INT |
ac49a949 | 5563 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
663522cb | 5564 | && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0) |
8079805d | 5565 | return op1; |
d0ab8cd3 | 5566 | |
230d793d | 5567 | /* Convert (A & B) | A to A. */ |
8079805d RK |
5568 | if (GET_CODE (op0) == AND |
5569 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
5570 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
5571 | && ! side_effects_p (XEXP (op0, 0)) | |
5572 | && ! side_effects_p (XEXP (op0, 1))) | |
5573 | return op1; | |
230d793d RS |
5574 | |
5575 | /* If we have (ior (and A B) C), apply the distributive law and then | |
5576 | the inverse distributive law to see if things simplify. */ | |
5577 | ||
1999435c PB |
5578 | if (GET_CODE (op0) == AND) |
5579 | { | |
5580 | x = apply_distributive_law | |
5581 | (gen_binary (AND, mode, | |
5582 | gen_binary (IOR, mode, XEXP (op0, 0), op1), | |
5583 | gen_binary (IOR, mode, XEXP (op0, 1), | |
5584 | copy_rtx (op1)))); | |
5585 | ||
5586 | if (GET_CODE (x) != IOR) | |
5587 | return x; | |
5588 | } | |
5589 | ||
5590 | if (GET_CODE (op1) == AND) | |
230d793d | 5591 | { |
1999435c PB |
5592 | x = apply_distributive_law |
5593 | (gen_binary (AND, mode, | |
5594 | gen_binary (IOR, mode, XEXP (op1, 0), op0), | |
5595 | gen_binary (IOR, mode, XEXP (op1, 1), | |
5596 | copy_rtx (op0)))); | |
5597 | ||
5598 | if (GET_CODE (x) != IOR) | |
5599 | return x; | |
230d793d RS |
5600 | } |
5601 | ||
5602 | /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the | |
5603 | mode size to (rotate A CX). */ | |
5604 | ||
8079805d RK |
5605 | if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT) |
5606 | || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT)) | |
5607 | && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0)) | |
5608 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5609 | && GET_CODE (XEXP (op1, 1)) == CONST_INT | |
5610 | && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1)) | |
230d793d | 5611 | == GET_MODE_BITSIZE (mode))) |
38a448ca RH |
5612 | return gen_rtx_ROTATE (mode, XEXP (op0, 0), |
5613 | (GET_CODE (op0) == ASHIFT | |
5614 | ? XEXP (op0, 1) : XEXP (op1, 1))); | |
230d793d | 5615 | |
71923da7 RK |
5616 | /* If OP0 is (ashiftrt (plus ...) C), it might actually be |
5617 | a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS | |
5618 | does not affect any of the bits in OP1, it can really be done | |
5619 | as a PLUS and we can associate. We do this by seeing if OP1 | |
5620 | can be safely shifted left C bits. */ | |
5621 | if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT | |
5622 | && GET_CODE (XEXP (op0, 0)) == PLUS | |
5623 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
5624 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5625 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT) | |
5626 | { | |
5627 | int count = INTVAL (XEXP (op0, 1)); | |
5628 | HOST_WIDE_INT mask = INTVAL (op1) << count; | |
5629 | ||
5630 | if (mask >> count == INTVAL (op1) | |
5631 | && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0) | |
5632 | { | |
5633 | SUBST (XEXP (XEXP (op0, 0), 1), | |
5634 | GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask)); | |
5635 | return op0; | |
5636 | } | |
5637 | } | |
230d793d RS |
5638 | break; |
5639 | ||
5640 | case XOR: | |
79e8185c JH |
5641 | /* If we are XORing two things that have no bits in common, |
5642 | convert them into an IOR. This helps to detect rotation encoded | |
5643 | using those methods and possibly other simplifications. */ | |
5644 | ||
5645 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
5646 | && (nonzero_bits (op0, mode) | |
5647 | & nonzero_bits (op1, mode)) == 0) | |
1999435c | 5648 | return (gen_binary (IOR, mode, op0, op1)); |
79e8185c | 5649 | |
230d793d RS |
5650 | /* Convert (XOR (NOT x) (NOT y)) to (XOR x y). |
5651 | Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for | |
5652 | (NOT y). */ | |
5653 | { | |
5654 | int num_negated = 0; | |
230d793d | 5655 | |
8079805d RK |
5656 | if (GET_CODE (op0) == NOT) |
5657 | num_negated++, op0 = XEXP (op0, 0); | |
5658 | if (GET_CODE (op1) == NOT) | |
5659 | num_negated++, op1 = XEXP (op1, 0); | |
230d793d RS |
5660 | |
5661 | if (num_negated == 2) | |
5662 | { | |
8079805d RK |
5663 | SUBST (XEXP (x, 0), op0); |
5664 | SUBST (XEXP (x, 1), op1); | |
230d793d RS |
5665 | } |
5666 | else if (num_negated == 1) | |
f1c6ba8b | 5667 | return |
1999435c | 5668 | simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1), |
f1c6ba8b | 5669 | mode); |
230d793d RS |
5670 | } |
5671 | ||
5672 | /* Convert (xor (and A B) B) to (and (not A) B). The latter may | |
5673 | correspond to a machine insn or result in further simplifications | |
5674 | if B is a constant. */ | |
5675 | ||
8079805d RK |
5676 | if (GET_CODE (op0) == AND |
5677 | && rtx_equal_p (XEXP (op0, 1), op1) | |
5678 | && ! side_effects_p (op1)) | |
1999435c PB |
5679 | return gen_binary (AND, mode, |
5680 | simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode), | |
5681 | op1); | |
230d793d | 5682 | |
8079805d RK |
5683 | else if (GET_CODE (op0) == AND |
5684 | && rtx_equal_p (XEXP (op0, 0), op1) | |
5685 | && ! side_effects_p (op1)) | |
1999435c PB |
5686 | return gen_binary (AND, mode, |
5687 | simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode), | |
5688 | op1); | |
230d793d | 5689 | |
230d793d | 5690 | /* (xor (comparison foo bar) (const_int 1)) can become the reversed |
0802d516 RK |
5691 | comparison if STORE_FLAG_VALUE is 1. */ |
5692 | if (STORE_FLAG_VALUE == 1 | |
5693 | && op1 == const1_rtx | |
ec8e098d | 5694 | && COMPARISON_P (op0) |
9a915772 JH |
5695 | && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0), |
5696 | XEXP (op0, 1)))) | |
5697 | return reversed; | |
500c518b RK |
5698 | |
5699 | /* (lshiftrt foo C) where C is the number of bits in FOO minus 1 | |
5700 | is (lt foo (const_int 0)), so we can perform the above | |
0802d516 | 5701 | simplification if STORE_FLAG_VALUE is 1. */ |
500c518b | 5702 | |
0802d516 RK |
5703 | if (STORE_FLAG_VALUE == 1 |
5704 | && op1 == const1_rtx | |
8079805d RK |
5705 | && GET_CODE (op0) == LSHIFTRT |
5706 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5707 | && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1) | |
f1c6ba8b | 5708 | return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx); |
230d793d RS |
5709 | |
5710 | /* (xor (comparison foo bar) (const_int sign-bit)) | |
5711 | when STORE_FLAG_VALUE is the sign bit. */ | |
5f4f0e22 | 5712 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
0802d516 | 5713 | && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode)) |
e51712db | 5714 | == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1)) |
8079805d | 5715 | && op1 == const_true_rtx |
ec8e098d | 5716 | && COMPARISON_P (op0) |
9a915772 JH |
5717 | && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0), |
5718 | XEXP (op0, 1)))) | |
5719 | return reversed; | |
0918eca0 | 5720 | |
230d793d | 5721 | break; |
e9a25f70 JL |
5722 | |
5723 | default: | |
341c100f | 5724 | gcc_unreachable (); |
230d793d RS |
5725 | } |
5726 | ||
5727 | return x; | |
5728 | } | |
5729 | \f | |
5730 | /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound | |
5731 | operations" because they can be replaced with two more basic operations. | |
5732 | ZERO_EXTEND is also considered "compound" because it can be replaced with | |
5733 | an AND operation, which is simpler, though only one operation. | |
5734 | ||
5735 | The function expand_compound_operation is called with an rtx expression | |
663522cb | 5736 | and will convert it to the appropriate shifts and AND operations, |
230d793d RS |
5737 | simplifying at each stage. |
5738 | ||
5739 | The function make_compound_operation is called to convert an expression | |
5740 | consisting of shifts and ANDs into the equivalent compound expression. | |
5741 | It is the inverse of this function, loosely speaking. */ | |
5742 | ||
5743 | static rtx | |
79a490a9 | 5744 | expand_compound_operation (rtx x) |
230d793d | 5745 | { |
770ae6cc | 5746 | unsigned HOST_WIDE_INT pos = 0, len; |
230d793d | 5747 | int unsignedp = 0; |
770ae6cc | 5748 | unsigned int modewidth; |
230d793d RS |
5749 | rtx tem; |
5750 | ||
5751 | switch (GET_CODE (x)) | |
5752 | { | |
5753 | case ZERO_EXTEND: | |
5754 | unsignedp = 1; | |
5755 | case SIGN_EXTEND: | |
75473182 RS |
5756 | /* We can't necessarily use a const_int for a multiword mode; |
5757 | it depends on implicitly extending the value. | |
5758 | Since we don't know the right way to extend it, | |
5759 | we can't tell whether the implicit way is right. | |
5760 | ||
5761 | Even for a mode that is no wider than a const_int, | |
5762 | we can't win, because we need to sign extend one of its bits through | |
5763 | the rest of it, and we don't know which bit. */ | |
230d793d | 5764 | if (GET_CODE (XEXP (x, 0)) == CONST_INT) |
75473182 | 5765 | return x; |
230d793d | 5766 | |
8079805d RK |
5767 | /* Return if (subreg:MODE FROM 0) is not a safe replacement for |
5768 | (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM | |
5769 | because (SUBREG (MEM...)) is guaranteed to cause the MEM to be | |
5770 | reloaded. If not for that, MEM's would very rarely be safe. | |
5771 | ||
5772 | Reject MODEs bigger than a word, because we might not be able | |
5773 | to reference a two-register group starting with an arbitrary register | |
5774 | (and currently gen_lowpart might crash for a SUBREG). */ | |
663522cb | 5775 | |
8079805d | 5776 | if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD) |
230d793d RS |
5777 | return x; |
5778 | ||
71012d97 GK |
5779 | /* Reject MODEs that aren't scalar integers because turning vector |
5780 | or complex modes into shifts causes problems. */ | |
5781 | ||
5782 | if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0)))) | |
5783 | return x; | |
5784 | ||
230d793d RS |
5785 | len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))); |
5786 | /* If the inner object has VOIDmode (the only way this can happen | |
e0a2f705 | 5787 | is if it is an ASM_OPERANDS), we can't do anything since we don't |
230d793d RS |
5788 | know how much masking to do. */ |
5789 | if (len == 0) | |
5790 | return x; | |
5791 | ||
5792 | break; | |
5793 | ||
5794 | case ZERO_EXTRACT: | |
5795 | unsignedp = 1; | |
46d096a3 SB |
5796 | |
5797 | /* ... fall through ... */ | |
5798 | ||
230d793d RS |
5799 | case SIGN_EXTRACT: |
5800 | /* If the operand is a CLOBBER, just return it. */ | |
5801 | if (GET_CODE (XEXP (x, 0)) == CLOBBER) | |
5802 | return XEXP (x, 0); | |
5803 | ||
5804 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
5805 | || GET_CODE (XEXP (x, 2)) != CONST_INT | |
5806 | || GET_MODE (XEXP (x, 0)) == VOIDmode) | |
5807 | return x; | |
5808 | ||
71012d97 GK |
5809 | /* Reject MODEs that aren't scalar integers because turning vector |
5810 | or complex modes into shifts causes problems. */ | |
5811 | ||
5812 | if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0)))) | |
5813 | return x; | |
5814 | ||
230d793d RS |
5815 | len = INTVAL (XEXP (x, 1)); |
5816 | pos = INTVAL (XEXP (x, 2)); | |
5817 | ||
5818 | /* If this goes outside the object being extracted, replace the object | |
5819 | with a (use (mem ...)) construct that only combine understands | |
5820 | and is used only for this purpose. */ | |
5821 | if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) | |
38a448ca | 5822 | SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0))); |
230d793d | 5823 | |
f76b9db2 ILT |
5824 | if (BITS_BIG_ENDIAN) |
5825 | pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos; | |
5826 | ||
230d793d RS |
5827 | break; |
5828 | ||
5829 | default: | |
5830 | return x; | |
5831 | } | |
0f808b6f JH |
5832 | /* Convert sign extension to zero extension, if we know that the high |
5833 | bit is not set, as this is easier to optimize. It will be converted | |
5834 | back to cheaper alternative in make_extraction. */ | |
5835 | if (GET_CODE (x) == SIGN_EXTEND | |
5836 | && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
5837 | && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
663522cb | 5838 | & ~(((unsigned HOST_WIDE_INT) |
0f808b6f JH |
5839 | GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) |
5840 | >> 1)) | |
5841 | == 0))) | |
5842 | { | |
5843 | rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0)); | |
3dcd7d45 EC |
5844 | rtx temp2 = expand_compound_operation (temp); |
5845 | ||
5846 | /* Make sure this is a profitable operation. */ | |
5847 | if (rtx_cost (x, SET) > rtx_cost (temp2, SET)) | |
5848 | return temp2; | |
5849 | else if (rtx_cost (x, SET) > rtx_cost (temp, SET)) | |
5850 | return temp; | |
5851 | else | |
5852 | return x; | |
0f808b6f | 5853 | } |
230d793d | 5854 | |
0f13a422 ILT |
5855 | /* We can optimize some special cases of ZERO_EXTEND. */ |
5856 | if (GET_CODE (x) == ZERO_EXTEND) | |
5857 | { | |
5858 | /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we | |
5859 | know that the last value didn't have any inappropriate bits | |
5860 | set. */ | |
5861 | if (GET_CODE (XEXP (x, 0)) == TRUNCATE | |
5862 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x) | |
5863 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
5864 | && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x)) | |
663522cb | 5865 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5866 | return XEXP (XEXP (x, 0), 0); |
5867 | ||
5868 | /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */ | |
5869 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
5870 | && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x) | |
5871 | && subreg_lowpart_p (XEXP (x, 0)) | |
5872 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
5873 | && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x)) | |
663522cb | 5874 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5875 | return SUBREG_REG (XEXP (x, 0)); |
5876 | ||
5877 | /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo | |
5878 | is a comparison and STORE_FLAG_VALUE permits. This is like | |
5879 | the first case, but it works even when GET_MODE (x) is larger | |
5880 | than HOST_WIDE_INT. */ | |
5881 | if (GET_CODE (XEXP (x, 0)) == TRUNCATE | |
5882 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x) | |
ec8e098d | 5883 | && COMPARISON_P (XEXP (XEXP (x, 0), 0)) |
0f13a422 ILT |
5884 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) |
5885 | <= HOST_BITS_PER_WIDE_INT) | |
23190837 | 5886 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE |
663522cb | 5887 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5888 | return XEXP (XEXP (x, 0), 0); |
5889 | ||
5890 | /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */ | |
5891 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
5892 | && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x) | |
5893 | && subreg_lowpart_p (XEXP (x, 0)) | |
ec8e098d | 5894 | && COMPARISON_P (SUBREG_REG (XEXP (x, 0))) |
0f13a422 ILT |
5895 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) |
5896 | <= HOST_BITS_PER_WIDE_INT) | |
5897 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE | |
663522cb | 5898 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5899 | return SUBREG_REG (XEXP (x, 0)); |
5900 | ||
0f13a422 ILT |
5901 | } |
5902 | ||
230d793d RS |
5903 | /* If we reach here, we want to return a pair of shifts. The inner |
5904 | shift is a left shift of BITSIZE - POS - LEN bits. The outer | |
5905 | shift is a right shift of BITSIZE - LEN bits. It is arithmetic or | |
5906 | logical depending on the value of UNSIGNEDP. | |
5907 | ||
5908 | If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be | |
5909 | converted into an AND of a shift. | |
5910 | ||
5911 | We must check for the case where the left shift would have a negative | |
5912 | count. This can happen in a case like (x >> 31) & 255 on machines | |
5913 | that can't shift by a constant. On those machines, we would first | |
663522cb | 5914 | combine the shift with the AND to produce a variable-position |
230d793d RS |
5915 | extraction. Then the constant of 31 would be substituted in to produce |
5916 | a such a position. */ | |
5917 | ||
5918 | modewidth = GET_MODE_BITSIZE (GET_MODE (x)); | |
770ae6cc | 5919 | if (modewidth + len >= pos) |
5f4f0e22 | 5920 | tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT, |
230d793d | 5921 | GET_MODE (x), |
5f4f0e22 CH |
5922 | simplify_shift_const (NULL_RTX, ASHIFT, |
5923 | GET_MODE (x), | |
230d793d RS |
5924 | XEXP (x, 0), |
5925 | modewidth - pos - len), | |
5926 | modewidth - len); | |
5927 | ||
5f4f0e22 CH |
5928 | else if (unsignedp && len < HOST_BITS_PER_WIDE_INT) |
5929 | tem = simplify_and_const_int (NULL_RTX, GET_MODE (x), | |
5930 | simplify_shift_const (NULL_RTX, LSHIFTRT, | |
230d793d RS |
5931 | GET_MODE (x), |
5932 | XEXP (x, 0), pos), | |
5f4f0e22 | 5933 | ((HOST_WIDE_INT) 1 << len) - 1); |
230d793d RS |
5934 | else |
5935 | /* Any other cases we can't handle. */ | |
5936 | return x; | |
230d793d RS |
5937 | |
5938 | /* If we couldn't do this for some reason, return the original | |
5939 | expression. */ | |
5940 | if (GET_CODE (tem) == CLOBBER) | |
5941 | return x; | |
5942 | ||
5943 | return tem; | |
5944 | } | |
5945 | \f | |
5946 | /* X is a SET which contains an assignment of one object into | |
5947 | a part of another (such as a bit-field assignment, STRICT_LOW_PART, | |
5948 | or certain SUBREGS). If possible, convert it into a series of | |
5949 | logical operations. | |
5950 | ||
5951 | We half-heartedly support variable positions, but do not at all | |
5952 | support variable lengths. */ | |
5953 | ||
5954 | static rtx | |
79a490a9 | 5955 | expand_field_assignment (rtx x) |
230d793d RS |
5956 | { |
5957 | rtx inner; | |
0f41302f | 5958 | rtx pos; /* Always counts from low bit. */ |
230d793d | 5959 | int len; |
1999435c | 5960 | rtx mask; |
230d793d RS |
5961 | enum machine_mode compute_mode; |
5962 | ||
5963 | /* Loop until we find something we can't simplify. */ | |
5964 | while (1) | |
5965 | { | |
5966 | if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART | |
5967 | && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG) | |
5968 | { | |
5969 | inner = SUBREG_REG (XEXP (SET_DEST (x), 0)); | |
5970 | len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))); | |
47073a38 | 5971 | pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0))); |
230d793d RS |
5972 | } |
5973 | else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | |
5974 | && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT) | |
5975 | { | |
5976 | inner = XEXP (SET_DEST (x), 0); | |
5977 | len = INTVAL (XEXP (SET_DEST (x), 1)); | |
5978 | pos = XEXP (SET_DEST (x), 2); | |
5979 | ||
5980 | /* If the position is constant and spans the width of INNER, | |
5981 | surround INNER with a USE to indicate this. */ | |
5982 | if (GET_CODE (pos) == CONST_INT | |
5983 | && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner))) | |
38a448ca | 5984 | inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner); |
230d793d | 5985 | |
f76b9db2 ILT |
5986 | if (BITS_BIG_ENDIAN) |
5987 | { | |
5988 | if (GET_CODE (pos) == CONST_INT) | |
5989 | pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len | |
5990 | - INTVAL (pos)); | |
5991 | else if (GET_CODE (pos) == MINUS | |
5992 | && GET_CODE (XEXP (pos, 1)) == CONST_INT | |
5993 | && (INTVAL (XEXP (pos, 1)) | |
5994 | == GET_MODE_BITSIZE (GET_MODE (inner)) - len)) | |
5995 | /* If position is ADJUST - X, new position is X. */ | |
5996 | pos = XEXP (pos, 0); | |
5997 | else | |
1999435c PB |
5998 | pos = gen_binary (MINUS, GET_MODE (pos), |
5999 | GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) | |
6000 | - len), | |
6001 | pos); | |
f76b9db2 | 6002 | } |
230d793d RS |
6003 | } |
6004 | ||
6005 | /* A SUBREG between two modes that occupy the same numbers of words | |
6006 | can be done by moving the SUBREG to the source. */ | |
6007 | else if (GET_CODE (SET_DEST (x)) == SUBREG | |
b1e9c8a9 AO |
6008 | /* We need SUBREGs to compute nonzero_bits properly. */ |
6009 | && nonzero_sign_valid | |
230d793d RS |
6010 | && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x))) |
6011 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
6012 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x)))) | |
6013 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))) | |
6014 | { | |
38a448ca | 6015 | x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)), |
4de249d9 | 6016 | gen_lowpart |
c5c76735 JL |
6017 | (GET_MODE (SUBREG_REG (SET_DEST (x))), |
6018 | SET_SRC (x))); | |
230d793d RS |
6019 | continue; |
6020 | } | |
6021 | else | |
6022 | break; | |
6023 | ||
6024 | while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner)) | |
6025 | inner = SUBREG_REG (inner); | |
6026 | ||
6027 | compute_mode = GET_MODE (inner); | |
6028 | ||
71012d97 GK |
6029 | /* Don't attempt bitwise arithmetic on non scalar integer modes. */ |
6030 | if (! SCALAR_INT_MODE_P (compute_mode)) | |
861556b4 RH |
6031 | { |
6032 | enum machine_mode imode; | |
6033 | ||
71012d97 | 6034 | /* Don't do anything for vector or complex integral types. */ |
861556b4 RH |
6035 | if (! FLOAT_MODE_P (compute_mode)) |
6036 | break; | |
6037 | ||
6038 | /* Try to find an integral mode to pun with. */ | |
6039 | imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0); | |
6040 | if (imode == BLKmode) | |
6041 | break; | |
6042 | ||
6043 | compute_mode = imode; | |
4de249d9 | 6044 | inner = gen_lowpart (imode, inner); |
861556b4 RH |
6045 | } |
6046 | ||
230d793d | 6047 | /* Compute a mask of LEN bits, if we can do this on the host machine. */ |
1999435c PB |
6048 | if (len < HOST_BITS_PER_WIDE_INT) |
6049 | mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1); | |
6050 | else | |
230d793d RS |
6051 | break; |
6052 | ||
6053 | /* Now compute the equivalent expression. Make a copy of INNER | |
6054 | for the SET_DEST in case it is a MEM into which we will substitute; | |
6055 | we don't want shared RTL in that case. */ | |
1999435c PB |
6056 | x = gen_rtx_SET |
6057 | (VOIDmode, copy_rtx (inner), | |
6058 | gen_binary (IOR, compute_mode, | |
6059 | gen_binary (AND, compute_mode, | |
6060 | simplify_gen_unary (NOT, compute_mode, | |
6061 | gen_binary (ASHIFT, | |
6062 | compute_mode, | |
6063 | mask, pos), | |
6064 | compute_mode), | |
6065 | inner), | |
6066 | gen_binary (ASHIFT, compute_mode, | |
6067 | gen_binary (AND, compute_mode, | |
6068 | gen_lowpart | |
6069 | (compute_mode, SET_SRC (x)), | |
6070 | mask), | |
6071 | pos))); | |
230d793d RS |
6072 | } |
6073 | ||
6074 | return x; | |
6075 | } | |
6076 | \f | |
8999a12e RK |
6077 | /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero, |
6078 | it is an RTX that represents a variable starting position; otherwise, | |
6079 | POS is the (constant) starting bit position (counted from the LSB). | |
230d793d RS |
6080 | |
6081 | INNER may be a USE. This will occur when we started with a bitfield | |
6082 | that went outside the boundary of the object in memory, which is | |
6083 | allowed on most machines. To isolate this case, we produce a USE | |
6084 | whose mode is wide enough and surround the MEM with it. The only | |
6085 | code that understands the USE is this routine. If it is not removed, | |
6086 | it will cause the resulting insn not to match. | |
6087 | ||
da7d8304 | 6088 | UNSIGNEDP is nonzero for an unsigned reference and zero for a |
230d793d RS |
6089 | signed reference. |
6090 | ||
da7d8304 KH |
6091 | IN_DEST is nonzero if this is a reference in the destination of a |
6092 | SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero, | |
230d793d RS |
6093 | a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will |
6094 | be used. | |
6095 | ||
da7d8304 | 6096 | IN_COMPARE is nonzero if we are in a COMPARE. This means that a |
230d793d RS |
6097 | ZERO_EXTRACT should be built even for bits starting at bit 0. |
6098 | ||
76184def DE |
6099 | MODE is the desired mode of the result (if IN_DEST == 0). |
6100 | ||
6101 | The result is an RTX for the extraction or NULL_RTX if the target | |
6102 | can't handle it. */ | |
230d793d RS |
6103 | |
6104 | static rtx | |
79a490a9 AJ |
6105 | make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos, |
6106 | rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp, | |
6107 | int in_dest, int in_compare) | |
230d793d | 6108 | { |
94b4b17a RS |
6109 | /* This mode describes the size of the storage area |
6110 | to fetch the overall value from. Within that, we | |
6111 | ignore the POS lowest bits, etc. */ | |
230d793d RS |
6112 | enum machine_mode is_mode = GET_MODE (inner); |
6113 | enum machine_mode inner_mode; | |
d7cd794f RK |
6114 | enum machine_mode wanted_inner_mode = byte_mode; |
6115 | enum machine_mode wanted_inner_reg_mode = word_mode; | |
230d793d RS |
6116 | enum machine_mode pos_mode = word_mode; |
6117 | enum machine_mode extraction_mode = word_mode; | |
6118 | enum machine_mode tmode = mode_for_size (len, MODE_INT, 1); | |
6119 | int spans_byte = 0; | |
6120 | rtx new = 0; | |
8999a12e | 6121 | rtx orig_pos_rtx = pos_rtx; |
770ae6cc | 6122 | HOST_WIDE_INT orig_pos; |
230d793d RS |
6123 | |
6124 | /* Get some information about INNER and get the innermost object. */ | |
6125 | if (GET_CODE (inner) == USE) | |
94b4b17a | 6126 | /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */ |
230d793d RS |
6127 | /* We don't need to adjust the position because we set up the USE |
6128 | to pretend that it was a full-word object. */ | |
6129 | spans_byte = 1, inner = XEXP (inner, 0); | |
6130 | else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner)) | |
94b4b17a RS |
6131 | { |
6132 | /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...), | |
6133 | consider just the QI as the memory to extract from. | |
6134 | The subreg adds or removes high bits; its mode is | |
6135 | irrelevant to the meaning of this extraction, | |
6136 | since POS and LEN count from the lsb. */ | |
3c0cb5de | 6137 | if (MEM_P (SUBREG_REG (inner))) |
94b4b17a RS |
6138 | is_mode = GET_MODE (SUBREG_REG (inner)); |
6139 | inner = SUBREG_REG (inner); | |
6140 | } | |
988ef418 RS |
6141 | else if (GET_CODE (inner) == ASHIFT |
6142 | && GET_CODE (XEXP (inner, 1)) == CONST_INT | |
6143 | && pos_rtx == 0 && pos == 0 | |
3129af4c | 6144 | && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1))) |
988ef418 RS |
6145 | { |
6146 | /* We're extracting the least significant bits of an rtx | |
6147 | (ashift X (const_int C)), where LEN > C. Extract the | |
6148 | least significant (LEN - C) bits of X, giving an rtx | |
6149 | whose mode is MODE, then shift it left C times. */ | |
6150 | new = make_extraction (mode, XEXP (inner, 0), | |
6151 | 0, 0, len - INTVAL (XEXP (inner, 1)), | |
6152 | unsignedp, in_dest, in_compare); | |
6153 | if (new != 0) | |
6154 | return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1)); | |
6155 | } | |
230d793d RS |
6156 | |
6157 | inner_mode = GET_MODE (inner); | |
6158 | ||
6159 | if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT) | |
8999a12e | 6160 | pos = INTVAL (pos_rtx), pos_rtx = 0; |
230d793d RS |
6161 | |
6162 | /* See if this can be done without an extraction. We never can if the | |
6163 | width of the field is not the same as that of some integer mode. For | |
6164 | registers, we can only avoid the extraction if the position is at the | |
6165 | low-order bit and this is either not in the destination or we have the | |
6166 | appropriate STRICT_LOW_PART operation available. | |
6167 | ||
6168 | For MEM, we can avoid an extract if the field starts on an appropriate | |
6169 | boundary and we can change the mode of the memory reference. However, | |
6170 | we cannot directly access the MEM if we have a USE and the underlying | |
6171 | MEM is not TMODE. This combination means that MEM was being used in a | |
6172 | context where bits outside its mode were being referenced; that is only | |
6173 | valid in bit-field insns. */ | |
6174 | ||
6175 | if (tmode != BLKmode | |
6176 | && ! (spans_byte && inner_mode != tmode) | |
4d9cfc7b | 6177 | && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0 |
3c0cb5de | 6178 | && !MEM_P (inner) |
230d793d | 6179 | && (! in_dest |
f8cfc6aa | 6180 | || (REG_P (inner) |
ef89d648 | 6181 | && have_insn_for (STRICT_LOW_PART, tmode)))) |
3c0cb5de | 6182 | || (MEM_P (inner) && pos_rtx == 0 |
dfbe1b2f RK |
6183 | && (pos |
6184 | % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode) | |
6185 | : BITS_PER_UNIT)) == 0 | |
230d793d RS |
6186 | /* We can't do this if we are widening INNER_MODE (it |
6187 | may not be aligned, for one thing). */ | |
6188 | && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode) | |
6189 | && (inner_mode == tmode | |
6190 | || (! mode_dependent_address_p (XEXP (inner, 0)) | |
6191 | && ! MEM_VOLATILE_P (inner)))))) | |
6192 | { | |
230d793d RS |
6193 | /* If INNER is a MEM, make a new MEM that encompasses just the desired |
6194 | field. If the original and current mode are the same, we need not | |
663522cb | 6195 | adjust the offset. Otherwise, we do if bytes big endian. |
230d793d | 6196 | |
4d9cfc7b RK |
6197 | If INNER is not a MEM, get a piece consisting of just the field |
6198 | of interest (in this case POS % BITS_PER_WORD must be 0). */ | |
230d793d | 6199 | |
3c0cb5de | 6200 | if (MEM_P (inner)) |
230d793d | 6201 | { |
f1ec5147 RK |
6202 | HOST_WIDE_INT offset; |
6203 | ||
94b4b17a RS |
6204 | /* POS counts from lsb, but make OFFSET count in memory order. */ |
6205 | if (BYTES_BIG_ENDIAN) | |
6206 | offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT; | |
6207 | else | |
6208 | offset = pos / BITS_PER_UNIT; | |
230d793d | 6209 | |
f1ec5147 | 6210 | new = adjust_address_nv (inner, tmode, offset); |
230d793d | 6211 | } |
f8cfc6aa | 6212 | else if (REG_P (inner)) |
c0d3ac4d | 6213 | { |
c0d3ac4d | 6214 | if (tmode != inner_mode) |
ddef6bc7 | 6215 | { |
4de249d9 | 6216 | /* We can't call gen_lowpart in a DEST since we |
91f8389c EB |
6217 | always want a SUBREG (see below) and it would sometimes |
6218 | return a new hard register. */ | |
6219 | if (pos || in_dest) | |
54c2fc72 | 6220 | { |
54c2fc72 JW |
6221 | HOST_WIDE_INT final_word = pos / BITS_PER_WORD; |
6222 | ||
6223 | if (WORDS_BIG_ENDIAN | |
6224 | && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD) | |
6225 | final_word = ((GET_MODE_SIZE (inner_mode) | |
6226 | - GET_MODE_SIZE (tmode)) | |
6227 | / UNITS_PER_WORD) - final_word; | |
6228 | ||
6229 | final_word *= UNITS_PER_WORD; | |
6230 | if (BYTES_BIG_ENDIAN && | |
6231 | GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode)) | |
6232 | final_word += (GET_MODE_SIZE (inner_mode) | |
6233 | - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD; | |
6234 | ||
6235 | /* Avoid creating invalid subregs, for example when | |
6236 | simplifying (x>>32)&255. */ | |
6237 | if (final_word >= GET_MODE_SIZE (inner_mode)) | |
6238 | return NULL_RTX; | |
6239 | ||
6240 | new = gen_rtx_SUBREG (tmode, inner, final_word); | |
6241 | } | |
6242 | else | |
4de249d9 | 6243 | new = gen_lowpart (tmode, inner); |
ddef6bc7 | 6244 | } |
23190837 AJ |
6245 | else |
6246 | new = inner; | |
6247 | } | |
230d793d | 6248 | else |
6139ff20 RK |
6249 | new = force_to_mode (inner, tmode, |
6250 | len >= HOST_BITS_PER_WIDE_INT | |
0345195a | 6251 | ? ~(unsigned HOST_WIDE_INT) 0 |
729a2125 | 6252 | : ((unsigned HOST_WIDE_INT) 1 << len) - 1, |
e3d616e3 | 6253 | NULL_RTX, 0); |
230d793d | 6254 | |
663522cb | 6255 | /* If this extraction is going into the destination of a SET, |
230d793d RS |
6256 | make a STRICT_LOW_PART unless we made a MEM. */ |
6257 | ||
6258 | if (in_dest) | |
3c0cb5de | 6259 | return (MEM_P (new) ? new |
77fa0940 | 6260 | : (GET_CODE (new) != SUBREG |
38a448ca | 6261 | ? gen_rtx_CLOBBER (tmode, const0_rtx) |
f1c6ba8b | 6262 | : gen_rtx_STRICT_LOW_PART (VOIDmode, new))); |
230d793d | 6263 | |
0f808b6f JH |
6264 | if (mode == tmode) |
6265 | return new; | |
6266 | ||
0a7ec763 | 6267 | if (GET_CODE (new) == CONST_INT) |
2496c7bd | 6268 | return gen_int_mode (INTVAL (new), mode); |
0a7ec763 | 6269 | |
0f808b6f JH |
6270 | /* If we know that no extraneous bits are set, and that the high |
6271 | bit is not set, convert the extraction to the cheaper of | |
6272 | sign and zero extension, that are equivalent in these cases. */ | |
6273 | if (flag_expensive_optimizations | |
6274 | && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT | |
6275 | && ((nonzero_bits (new, tmode) | |
663522cb KH |
6276 | & ~(((unsigned HOST_WIDE_INT) |
6277 | GET_MODE_MASK (tmode)) | |
6278 | >> 1)) | |
0f808b6f JH |
6279 | == 0))) |
6280 | { | |
6281 | rtx temp = gen_rtx_ZERO_EXTEND (mode, new); | |
6282 | rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new); | |
6283 | ||
6284 | /* Prefer ZERO_EXTENSION, since it gives more information to | |
6285 | backends. */ | |
25ffb1f6 | 6286 | if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET)) |
0f808b6f JH |
6287 | return temp; |
6288 | return temp1; | |
6289 | } | |
6290 | ||
230d793d RS |
6291 | /* Otherwise, sign- or zero-extend unless we already are in the |
6292 | proper mode. */ | |
6293 | ||
f1c6ba8b RK |
6294 | return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, |
6295 | mode, new)); | |
230d793d RS |
6296 | } |
6297 | ||
cc471082 RS |
6298 | /* Unless this is a COMPARE or we have a funny memory reference, |
6299 | don't do anything with zero-extending field extracts starting at | |
6300 | the low-order bit since they are simple AND operations. */ | |
8999a12e RK |
6301 | if (pos_rtx == 0 && pos == 0 && ! in_dest |
6302 | && ! in_compare && ! spans_byte && unsignedp) | |
230d793d RS |
6303 | return 0; |
6304 | ||
c5c76735 JL |
6305 | /* Unless we are allowed to span bytes or INNER is not MEM, reject this if |
6306 | we would be spanning bytes or if the position is not a constant and the | |
6307 | length is not 1. In all other cases, we would only be going outside | |
6308 | our object in cases when an original shift would have been | |
e7373556 | 6309 | undefined. */ |
3c0cb5de | 6310 | if (! spans_byte && MEM_P (inner) |
e7373556 RK |
6311 | && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode)) |
6312 | || (pos_rtx != 0 && len != 1))) | |
6313 | return 0; | |
6314 | ||
d7cd794f | 6315 | /* Get the mode to use should INNER not be a MEM, the mode for the position, |
230d793d | 6316 | and the mode for the result. */ |
505ddab6 | 6317 | if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE) |
230d793d | 6318 | { |
da920570 ZW |
6319 | wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0); |
6320 | pos_mode = mode_for_extraction (EP_insv, 2); | |
6321 | extraction_mode = mode_for_extraction (EP_insv, 3); | |
230d793d | 6322 | } |
230d793d | 6323 | |
da920570 ZW |
6324 | if (! in_dest && unsignedp |
6325 | && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE) | |
230d793d | 6326 | { |
da920570 ZW |
6327 | wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1); |
6328 | pos_mode = mode_for_extraction (EP_extzv, 3); | |
6329 | extraction_mode = mode_for_extraction (EP_extzv, 0); | |
230d793d | 6330 | } |
230d793d | 6331 | |
da920570 ZW |
6332 | if (! in_dest && ! unsignedp |
6333 | && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE) | |
230d793d | 6334 | { |
da920570 ZW |
6335 | wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1); |
6336 | pos_mode = mode_for_extraction (EP_extv, 3); | |
6337 | extraction_mode = mode_for_extraction (EP_extv, 0); | |
230d793d | 6338 | } |
230d793d RS |
6339 | |
6340 | /* Never narrow an object, since that might not be safe. */ | |
6341 | ||
6342 | if (mode != VOIDmode | |
6343 | && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode)) | |
6344 | extraction_mode = mode; | |
6345 | ||
6346 | if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode | |
6347 | && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx))) | |
6348 | pos_mode = GET_MODE (pos_rtx); | |
6349 | ||
d7cd794f RK |
6350 | /* If this is not from memory, the desired mode is wanted_inner_reg_mode; |
6351 | if we have to change the mode of memory and cannot, the desired mode is | |
6352 | EXTRACTION_MODE. */ | |
3c0cb5de | 6353 | if (!MEM_P (inner)) |
d7cd794f RK |
6354 | wanted_inner_mode = wanted_inner_reg_mode; |
6355 | else if (inner_mode != wanted_inner_mode | |
6356 | && (mode_dependent_address_p (XEXP (inner, 0)) | |
6357 | || MEM_VOLATILE_P (inner))) | |
6358 | wanted_inner_mode = extraction_mode; | |
230d793d | 6359 | |
6139ff20 RK |
6360 | orig_pos = pos; |
6361 | ||
f76b9db2 ILT |
6362 | if (BITS_BIG_ENDIAN) |
6363 | { | |
cf54c2cd DE |
6364 | /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to |
6365 | BITS_BIG_ENDIAN style. If position is constant, compute new | |
6366 | position. Otherwise, build subtraction. | |
6367 | Note that POS is relative to the mode of the original argument. | |
6368 | If it's a MEM we need to recompute POS relative to that. | |
6369 | However, if we're extracting from (or inserting into) a register, | |
6370 | we want to recompute POS relative to wanted_inner_mode. */ | |
3c0cb5de | 6371 | int width = (MEM_P (inner) |
cf54c2cd DE |
6372 | ? GET_MODE_BITSIZE (is_mode) |
6373 | : GET_MODE_BITSIZE (wanted_inner_mode)); | |
6374 | ||
f76b9db2 | 6375 | if (pos_rtx == 0) |
cf54c2cd | 6376 | pos = width - len - pos; |
f76b9db2 ILT |
6377 | else |
6378 | pos_rtx | |
f1c6ba8b | 6379 | = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx); |
cf54c2cd | 6380 | /* POS may be less than 0 now, but we check for that below. |
3c0cb5de | 6381 | Note that it can only be less than 0 if !MEM_P (inner). */ |
f76b9db2 | 6382 | } |
230d793d RS |
6383 | |
6384 | /* If INNER has a wider mode, make it smaller. If this is a constant | |
6385 | extract, try to adjust the byte to point to the byte containing | |
6386 | the value. */ | |
d7cd794f RK |
6387 | if (wanted_inner_mode != VOIDmode |
6388 | && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode) | |
3c0cb5de | 6389 | && ((MEM_P (inner) |
d7cd794f | 6390 | && (inner_mode == wanted_inner_mode |
230d793d RS |
6391 | || (! mode_dependent_address_p (XEXP (inner, 0)) |
6392 | && ! MEM_VOLATILE_P (inner)))))) | |
6393 | { | |
6394 | int offset = 0; | |
6395 | ||
6396 | /* The computations below will be correct if the machine is big | |
6397 | endian in both bits and bytes or little endian in bits and bytes. | |
6398 | If it is mixed, we must adjust. */ | |
663522cb | 6399 | |
230d793d | 6400 | /* If bytes are big endian and we had a paradoxical SUBREG, we must |
0f41302f | 6401 | adjust OFFSET to compensate. */ |
f76b9db2 ILT |
6402 | if (BYTES_BIG_ENDIAN |
6403 | && ! spans_byte | |
230d793d RS |
6404 | && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode)) |
6405 | offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode); | |
230d793d RS |
6406 | |
6407 | /* If this is a constant position, we can move to the desired byte. */ | |
8999a12e | 6408 | if (pos_rtx == 0) |
230d793d RS |
6409 | { |
6410 | offset += pos / BITS_PER_UNIT; | |
d7cd794f | 6411 | pos %= GET_MODE_BITSIZE (wanted_inner_mode); |
230d793d RS |
6412 | } |
6413 | ||
f76b9db2 ILT |
6414 | if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN |
6415 | && ! spans_byte | |
d7cd794f | 6416 | && is_mode != wanted_inner_mode) |
c6b3f1f2 | 6417 | offset = (GET_MODE_SIZE (is_mode) |
d7cd794f | 6418 | - GET_MODE_SIZE (wanted_inner_mode) - offset); |
c6b3f1f2 | 6419 | |
d7cd794f | 6420 | if (offset != 0 || inner_mode != wanted_inner_mode) |
f1ec5147 | 6421 | inner = adjust_address_nv (inner, wanted_inner_mode, offset); |
230d793d RS |
6422 | } |
6423 | ||
9e74dc41 RK |
6424 | /* If INNER is not memory, we can always get it into the proper mode. If we |
6425 | are changing its mode, POS must be a constant and smaller than the size | |
6426 | of the new mode. */ | |
3c0cb5de | 6427 | else if (!MEM_P (inner)) |
9e74dc41 RK |
6428 | { |
6429 | if (GET_MODE (inner) != wanted_inner_mode | |
6430 | && (pos_rtx != 0 | |
6431 | || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode))) | |
6432 | return 0; | |
6433 | ||
6434 | inner = force_to_mode (inner, wanted_inner_mode, | |
6435 | pos_rtx | |
6436 | || len + orig_pos >= HOST_BITS_PER_WIDE_INT | |
0345195a | 6437 | ? ~(unsigned HOST_WIDE_INT) 0 |
729a2125 RK |
6438 | : ((((unsigned HOST_WIDE_INT) 1 << len) - 1) |
6439 | << orig_pos), | |
9e74dc41 RK |
6440 | NULL_RTX, 0); |
6441 | } | |
230d793d RS |
6442 | |
6443 | /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we | |
6444 | have to zero extend. Otherwise, we can just use a SUBREG. */ | |
8999a12e | 6445 | if (pos_rtx != 0 |
230d793d | 6446 | && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx))) |
0f808b6f | 6447 | { |
f1c6ba8b | 6448 | rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx); |
0f808b6f JH |
6449 | |
6450 | /* If we know that no extraneous bits are set, and that the high | |
eaec9b3d | 6451 | bit is not set, convert extraction to cheaper one - either |
0f808b6f JH |
6452 | SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these |
6453 | cases. */ | |
6454 | if (flag_expensive_optimizations | |
6455 | && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT | |
6456 | && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx)) | |
663522cb KH |
6457 | & ~(((unsigned HOST_WIDE_INT) |
6458 | GET_MODE_MASK (GET_MODE (pos_rtx))) | |
6459 | >> 1)) | |
0f808b6f JH |
6460 | == 0))) |
6461 | { | |
6462 | rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx); | |
6463 | ||
25ffb1f6 | 6464 | /* Prefer ZERO_EXTENSION, since it gives more information to |
0f808b6f JH |
6465 | backends. */ |
6466 | if (rtx_cost (temp1, SET) < rtx_cost (temp, SET)) | |
6467 | temp = temp1; | |
6468 | } | |
6469 | pos_rtx = temp; | |
6470 | } | |
8999a12e | 6471 | else if (pos_rtx != 0 |
230d793d | 6472 | && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx))) |
4de249d9 | 6473 | pos_rtx = gen_lowpart (pos_mode, pos_rtx); |
230d793d | 6474 | |
8999a12e RK |
6475 | /* Make POS_RTX unless we already have it and it is correct. If we don't |
6476 | have a POS_RTX but we do have an ORIG_POS_RTX, the latter must | |
0f41302f | 6477 | be a CONST_INT. */ |
8999a12e RK |
6478 | if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos) |
6479 | pos_rtx = orig_pos_rtx; | |
6480 | ||
6481 | else if (pos_rtx == 0) | |
5f4f0e22 | 6482 | pos_rtx = GEN_INT (pos); |
230d793d RS |
6483 | |
6484 | /* Make the required operation. See if we can use existing rtx. */ | |
f1c6ba8b | 6485 | new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT, |
5f4f0e22 | 6486 | extraction_mode, inner, GEN_INT (len), pos_rtx); |
230d793d | 6487 | if (! in_dest) |
4de249d9 | 6488 | new = gen_lowpart (mode, new); |
230d793d RS |
6489 | |
6490 | return new; | |
6491 | } | |
6492 | \f | |
71923da7 RK |
6493 | /* See if X contains an ASHIFT of COUNT or more bits that can be commuted |
6494 | with any other operations in X. Return X without that shift if so. */ | |
6495 | ||
6496 | static rtx | |
79a490a9 | 6497 | extract_left_shift (rtx x, int count) |
71923da7 RK |
6498 | { |
6499 | enum rtx_code code = GET_CODE (x); | |
6500 | enum machine_mode mode = GET_MODE (x); | |
6501 | rtx tem; | |
6502 | ||
6503 | switch (code) | |
6504 | { | |
6505 | case ASHIFT: | |
6506 | /* This is the shift itself. If it is wide enough, we will return | |
6507 | either the value being shifted if the shift count is equal to | |
6508 | COUNT or a shift for the difference. */ | |
6509 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6510 | && INTVAL (XEXP (x, 1)) >= count) | |
6511 | return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), | |
6512 | INTVAL (XEXP (x, 1)) - count); | |
6513 | break; | |
6514 | ||
6515 | case NEG: case NOT: | |
6516 | if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0) | |
f1c6ba8b | 6517 | return simplify_gen_unary (code, mode, tem, mode); |
71923da7 RK |
6518 | |
6519 | break; | |
6520 | ||
6521 | case PLUS: case IOR: case XOR: case AND: | |
6522 | /* If we can safely shift this constant and we find the inner shift, | |
6523 | make a new operation. */ | |
e869aa39 | 6524 | if (GET_CODE (XEXP (x, 1)) == CONST_INT |
b729186a | 6525 | && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0 |
71923da7 | 6526 | && (tem = extract_left_shift (XEXP (x, 0), count)) != 0) |
1999435c PB |
6527 | return gen_binary (code, mode, tem, |
6528 | GEN_INT (INTVAL (XEXP (x, 1)) >> count)); | |
71923da7 RK |
6529 | |
6530 | break; | |
663522cb | 6531 | |
e9a25f70 JL |
6532 | default: |
6533 | break; | |
71923da7 RK |
6534 | } |
6535 | ||
6536 | return 0; | |
6537 | } | |
6538 | \f | |
230d793d RS |
6539 | /* Look at the expression rooted at X. Look for expressions |
6540 | equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND. | |
6541 | Form these expressions. | |
6542 | ||
6543 | Return the new rtx, usually just X. | |
6544 | ||
8aeea6e6 | 6545 | Also, for machines like the VAX that don't have logical shift insns, |
230d793d RS |
6546 | try to convert logical to arithmetic shift operations in cases where |
6547 | they are equivalent. This undoes the canonicalizations to logical | |
6548 | shifts done elsewhere. | |
6549 | ||
6550 | We try, as much as possible, to re-use rtl expressions to save memory. | |
6551 | ||
6552 | IN_CODE says what kind of expression we are processing. Normally, it is | |
42495ca0 RK |
6553 | SET. In a memory address (inside a MEM, PLUS or minus, the latter two |
6554 | being kludges), it is MEM. When processing the arguments of a comparison | |
230d793d RS |
6555 | or a COMPARE against zero, it is COMPARE. */ |
6556 | ||
6557 | static rtx | |
79a490a9 | 6558 | make_compound_operation (rtx x, enum rtx_code in_code) |
230d793d RS |
6559 | { |
6560 | enum rtx_code code = GET_CODE (x); | |
6561 | enum machine_mode mode = GET_MODE (x); | |
6562 | int mode_width = GET_MODE_BITSIZE (mode); | |
71923da7 | 6563 | rtx rhs, lhs; |
230d793d | 6564 | enum rtx_code next_code; |
f24ad0e4 | 6565 | int i; |
230d793d | 6566 | rtx new = 0; |
280f58ba | 6567 | rtx tem; |
6f7d635c | 6568 | const char *fmt; |
230d793d RS |
6569 | |
6570 | /* Select the code to be used in recursive calls. Once we are inside an | |
6571 | address, we stay there. If we have a comparison, set to COMPARE, | |
6572 | but once inside, go back to our default of SET. */ | |
6573 | ||
42495ca0 | 6574 | next_code = (code == MEM || code == PLUS || code == MINUS ? MEM |
ec8e098d | 6575 | : ((code == COMPARE || COMPARISON_P (x)) |
230d793d RS |
6576 | && XEXP (x, 1) == const0_rtx) ? COMPARE |
6577 | : in_code == COMPARE ? SET : in_code); | |
6578 | ||
6579 | /* Process depending on the code of this operation. If NEW is set | |
da7d8304 | 6580 | nonzero, it will be returned. */ |
230d793d RS |
6581 | |
6582 | switch (code) | |
6583 | { | |
6584 | case ASHIFT: | |
230d793d RS |
6585 | /* Convert shifts by constants into multiplications if inside |
6586 | an address. */ | |
6587 | if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT | |
5f4f0e22 | 6588 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
230d793d | 6589 | && INTVAL (XEXP (x, 1)) >= 0) |
280f58ba RK |
6590 | { |
6591 | new = make_compound_operation (XEXP (x, 0), next_code); | |
f1c6ba8b RK |
6592 | new = gen_rtx_MULT (mode, new, |
6593 | GEN_INT ((HOST_WIDE_INT) 1 | |
6594 | << INTVAL (XEXP (x, 1)))); | |
280f58ba | 6595 | } |
230d793d RS |
6596 | break; |
6597 | ||
6598 | case AND: | |
6599 | /* If the second operand is not a constant, we can't do anything | |
6600 | with it. */ | |
6601 | if (GET_CODE (XEXP (x, 1)) != CONST_INT) | |
6602 | break; | |
6603 | ||
6604 | /* If the constant is a power of two minus one and the first operand | |
6605 | is a logical right shift, make an extraction. */ | |
6606 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
6607 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
6608 | { |
6609 | new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); | |
6610 | new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1, | |
6611 | 0, in_code == COMPARE); | |
6612 | } | |
dfbe1b2f | 6613 | |
230d793d RS |
6614 | /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */ |
6615 | else if (GET_CODE (XEXP (x, 0)) == SUBREG | |
6616 | && subreg_lowpart_p (XEXP (x, 0)) | |
6617 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT | |
6618 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
6619 | { |
6620 | new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0), | |
6621 | next_code); | |
2f99f437 | 6622 | new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0, |
280f58ba RK |
6623 | XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1, |
6624 | 0, in_code == COMPARE); | |
6625 | } | |
45620ed4 | 6626 | /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */ |
c2f9f64e JW |
6627 | else if ((GET_CODE (XEXP (x, 0)) == XOR |
6628 | || GET_CODE (XEXP (x, 0)) == IOR) | |
6629 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT | |
6630 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT | |
6631 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
6632 | { | |
6633 | /* Apply the distributive law, and then try to make extractions. */ | |
f1c6ba8b RK |
6634 | new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode, |
6635 | gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0), | |
6636 | XEXP (x, 1)), | |
6637 | gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1), | |
6638 | XEXP (x, 1))); | |
c2f9f64e JW |
6639 | new = make_compound_operation (new, in_code); |
6640 | } | |
a7c99304 RK |
6641 | |
6642 | /* If we are have (and (rotate X C) M) and C is larger than the number | |
6643 | of bits in M, this is an extraction. */ | |
6644 | ||
6645 | else if (GET_CODE (XEXP (x, 0)) == ROTATE | |
6646 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
6647 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0 | |
6648 | && i <= INTVAL (XEXP (XEXP (x, 0), 1))) | |
280f58ba RK |
6649 | { |
6650 | new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); | |
6651 | new = make_extraction (mode, new, | |
6652 | (GET_MODE_BITSIZE (mode) | |
6653 | - INTVAL (XEXP (XEXP (x, 0), 1))), | |
6654 | NULL_RTX, i, 1, 0, in_code == COMPARE); | |
6655 | } | |
a7c99304 RK |
6656 | |
6657 | /* On machines without logical shifts, if the operand of the AND is | |
230d793d RS |
6658 | a logical shift and our mask turns off all the propagated sign |
6659 | bits, we can replace the logical shift with an arithmetic shift. */ | |
ef89d648 ZW |
6660 | else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT |
6661 | && !have_insn_for (LSHIFTRT, mode) | |
6662 | && have_insn_for (ASHIFTRT, mode) | |
230d793d RS |
6663 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT |
6664 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
5f4f0e22 CH |
6665 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT |
6666 | && mode_width <= HOST_BITS_PER_WIDE_INT) | |
230d793d | 6667 | { |
5f4f0e22 | 6668 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
6669 | |
6670 | mask >>= INTVAL (XEXP (XEXP (x, 0), 1)); | |
6671 | if ((INTVAL (XEXP (x, 1)) & ~mask) == 0) | |
6672 | SUBST (XEXP (x, 0), | |
f1c6ba8b RK |
6673 | gen_rtx_ASHIFTRT (mode, |
6674 | make_compound_operation | |
6675 | (XEXP (XEXP (x, 0), 0), next_code), | |
6676 | XEXP (XEXP (x, 0), 1))); | |
230d793d RS |
6677 | } |
6678 | ||
6679 | /* If the constant is one less than a power of two, this might be | |
6680 | representable by an extraction even if no shift is present. | |
6681 | If it doesn't end up being a ZERO_EXTEND, we will ignore it unless | |
6682 | we are in a COMPARE. */ | |
6683 | else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
6684 | new = make_extraction (mode, |
6685 | make_compound_operation (XEXP (x, 0), | |
6686 | next_code), | |
6687 | 0, NULL_RTX, i, 1, 0, in_code == COMPARE); | |
230d793d RS |
6688 | |
6689 | /* If we are in a comparison and this is an AND with a power of two, | |
6690 | convert this into the appropriate bit extract. */ | |
6691 | else if (in_code == COMPARE | |
6692 | && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) | |
280f58ba RK |
6693 | new = make_extraction (mode, |
6694 | make_compound_operation (XEXP (x, 0), | |
6695 | next_code), | |
6696 | i, NULL_RTX, 1, 1, 0, 1); | |
230d793d RS |
6697 | |
6698 | break; | |
6699 | ||
6700 | case LSHIFTRT: | |
6701 | /* If the sign bit is known to be zero, replace this with an | |
6702 | arithmetic shift. */ | |
ef89d648 ZW |
6703 | if (have_insn_for (ASHIFTRT, mode) |
6704 | && ! have_insn_for (LSHIFTRT, mode) | |
5f4f0e22 | 6705 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 6706 | && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0) |
230d793d | 6707 | { |
f1c6ba8b RK |
6708 | new = gen_rtx_ASHIFTRT (mode, |
6709 | make_compound_operation (XEXP (x, 0), | |
6710 | next_code), | |
6711 | XEXP (x, 1)); | |
230d793d RS |
6712 | break; |
6713 | } | |
6714 | ||
0f41302f | 6715 | /* ... fall through ... */ |
230d793d RS |
6716 | |
6717 | case ASHIFTRT: | |
71923da7 RK |
6718 | lhs = XEXP (x, 0); |
6719 | rhs = XEXP (x, 1); | |
6720 | ||
230d793d RS |
6721 | /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1, |
6722 | this is a SIGN_EXTRACT. */ | |
71923da7 RK |
6723 | if (GET_CODE (rhs) == CONST_INT |
6724 | && GET_CODE (lhs) == ASHIFT | |
6725 | && GET_CODE (XEXP (lhs, 1)) == CONST_INT | |
6726 | && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))) | |
280f58ba | 6727 | { |
71923da7 | 6728 | new = make_compound_operation (XEXP (lhs, 0), next_code); |
280f58ba | 6729 | new = make_extraction (mode, new, |
71923da7 RK |
6730 | INTVAL (rhs) - INTVAL (XEXP (lhs, 1)), |
6731 | NULL_RTX, mode_width - INTVAL (rhs), | |
d0ab8cd3 | 6732 | code == LSHIFTRT, 0, in_code == COMPARE); |
8231ad94 | 6733 | break; |
d0ab8cd3 RK |
6734 | } |
6735 | ||
71923da7 RK |
6736 | /* See if we have operations between an ASHIFTRT and an ASHIFT. |
6737 | If so, try to merge the shifts into a SIGN_EXTEND. We could | |
6738 | also do this for some cases of SIGN_EXTRACT, but it doesn't | |
6739 | seem worth the effort; the case checked for occurs on Alpha. */ | |
663522cb | 6740 | |
ec8e098d | 6741 | if (!OBJECT_P (lhs) |
71923da7 | 6742 | && ! (GET_CODE (lhs) == SUBREG |
ec8e098d | 6743 | && (OBJECT_P (SUBREG_REG (lhs)))) |
71923da7 RK |
6744 | && GET_CODE (rhs) == CONST_INT |
6745 | && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT | |
6746 | && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0) | |
6747 | new = make_extraction (mode, make_compound_operation (new, next_code), | |
6748 | 0, NULL_RTX, mode_width - INTVAL (rhs), | |
6749 | code == LSHIFTRT, 0, in_code == COMPARE); | |
663522cb | 6750 | |
230d793d | 6751 | break; |
280f58ba RK |
6752 | |
6753 | case SUBREG: | |
6754 | /* Call ourselves recursively on the inner expression. If we are | |
6755 | narrowing the object and it has a different RTL code from | |
6756 | what it originally did, do this SUBREG as a force_to_mode. */ | |
6757 | ||
0a5cbff6 | 6758 | tem = make_compound_operation (SUBREG_REG (x), in_code); |
280f58ba RK |
6759 | if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x)) |
6760 | && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem)) | |
6761 | && subreg_lowpart_p (x)) | |
0a5cbff6 | 6762 | { |
e8dc6d50 JH |
6763 | rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0, |
6764 | NULL_RTX, 0); | |
0a5cbff6 RK |
6765 | |
6766 | /* If we have something other than a SUBREG, we might have | |
eaec9b3d | 6767 | done an expansion, so rerun ourselves. */ |
0a5cbff6 RK |
6768 | if (GET_CODE (newer) != SUBREG) |
6769 | newer = make_compound_operation (newer, in_code); | |
6770 | ||
6771 | return newer; | |
6772 | } | |
6f28d3e9 RH |
6773 | |
6774 | /* If this is a paradoxical subreg, and the new code is a sign or | |
6775 | zero extension, omit the subreg and widen the extension. If it | |
6776 | is a regular subreg, we can still get rid of the subreg by not | |
6777 | widening so much, or in fact removing the extension entirely. */ | |
6778 | if ((GET_CODE (tem) == SIGN_EXTEND | |
6779 | || GET_CODE (tem) == ZERO_EXTEND) | |
6780 | && subreg_lowpart_p (x)) | |
6781 | { | |
6782 | if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem)) | |
6783 | || (GET_MODE_SIZE (mode) > | |
6784 | GET_MODE_SIZE (GET_MODE (XEXP (tem, 0))))) | |
b10f2187 | 6785 | { |
ab5ecb01 | 6786 | if (! SCALAR_INT_MODE_P (mode)) |
b10f2187 R |
6787 | break; |
6788 | tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0)); | |
6789 | } | |
6f28d3e9 | 6790 | else |
4de249d9 | 6791 | tem = gen_lowpart (mode, XEXP (tem, 0)); |
6f28d3e9 RH |
6792 | return tem; |
6793 | } | |
e9a25f70 | 6794 | break; |
663522cb | 6795 | |
e9a25f70 JL |
6796 | default: |
6797 | break; | |
230d793d RS |
6798 | } |
6799 | ||
6800 | if (new) | |
6801 | { | |
4de249d9 | 6802 | x = gen_lowpart (mode, new); |
230d793d RS |
6803 | code = GET_CODE (x); |
6804 | } | |
6805 | ||
6806 | /* Now recursively process each operand of this operation. */ | |
6807 | fmt = GET_RTX_FORMAT (code); | |
6808 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
6809 | if (fmt[i] == 'e') | |
6810 | { | |
6811 | new = make_compound_operation (XEXP (x, i), next_code); | |
6812 | SUBST (XEXP (x, i), new); | |
6813 | } | |
6814 | ||
6815 | return x; | |
6816 | } | |
6817 | \f | |
6818 | /* Given M see if it is a value that would select a field of bits | |
663522cb KH |
6819 | within an item, but not the entire word. Return -1 if not. |
6820 | Otherwise, return the starting position of the field, where 0 is the | |
6821 | low-order bit. | |
230d793d RS |
6822 | |
6823 | *PLEN is set to the length of the field. */ | |
6824 | ||
6825 | static int | |
79a490a9 | 6826 | get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen) |
230d793d RS |
6827 | { |
6828 | /* Get the bit number of the first 1 bit from the right, -1 if none. */ | |
663522cb | 6829 | int pos = exact_log2 (m & -m); |
6de9cd9a | 6830 | int len = 0; |
230d793d | 6831 | |
6de9cd9a DN |
6832 | if (pos >= 0) |
6833 | /* Now shift off the low-order zero bits and see if we have a | |
6834 | power of two minus 1. */ | |
6835 | len = exact_log2 ((m >> pos) + 1); | |
230d793d | 6836 | |
d3bc8938 | 6837 | if (len <= 0) |
6de9cd9a | 6838 | pos = -1; |
230d793d | 6839 | |
d3bc8938 | 6840 | *plen = len; |
230d793d RS |
6841 | return pos; |
6842 | } | |
6843 | \f | |
6139ff20 RK |
6844 | /* See if X can be simplified knowing that we will only refer to it in |
6845 | MODE and will only refer to those bits that are nonzero in MASK. | |
6846 | If other bits are being computed or if masking operations are done | |
6847 | that select a superset of the bits in MASK, they can sometimes be | |
6848 | ignored. | |
6849 | ||
6850 | Return a possibly simplified expression, but always convert X to | |
6851 | MODE. If X is a CONST_INT, AND the CONST_INT with MASK. | |
dfbe1b2f | 6852 | |
da7d8304 | 6853 | Also, if REG is nonzero and X is a register equal in value to REG, |
e3d616e3 RK |
6854 | replace X with REG. |
6855 | ||
6856 | If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK | |
6857 | are all off in X. This is used when X will be complemented, by either | |
180b8e4b | 6858 | NOT, NEG, or XOR. */ |
dfbe1b2f RK |
6859 | |
6860 | static rtx | |
79a490a9 AJ |
6861 | force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask, |
6862 | rtx reg, int just_select) | |
dfbe1b2f RK |
6863 | { |
6864 | enum rtx_code code = GET_CODE (x); | |
180b8e4b | 6865 | int next_select = just_select || code == XOR || code == NOT || code == NEG; |
ef026f91 RS |
6866 | enum machine_mode op_mode; |
6867 | unsigned HOST_WIDE_INT fuller_mask, nonzero; | |
6139ff20 RK |
6868 | rtx op0, op1, temp; |
6869 | ||
132d2040 RK |
6870 | /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the |
6871 | code below will do the wrong thing since the mode of such an | |
663522cb | 6872 | expression is VOIDmode. |
be3d27d6 CI |
6873 | |
6874 | Also do nothing if X is a CLOBBER; this can happen if X was | |
4de249d9 | 6875 | the return value from a call to gen_lowpart. */ |
be3d27d6 | 6876 | if (code == CALL || code == ASM_OPERANDS || code == CLOBBER) |
246e00f2 RK |
6877 | return x; |
6878 | ||
6139ff20 RK |
6879 | /* We want to perform the operation is its present mode unless we know |
6880 | that the operation is valid in MODE, in which case we do the operation | |
6881 | in MODE. */ | |
1c75dfa4 | 6882 | op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x)) |
ef89d648 | 6883 | && have_insn_for (code, mode)) |
ef026f91 | 6884 | ? mode : GET_MODE (x)); |
e3d616e3 | 6885 | |
aa988991 RS |
6886 | /* It is not valid to do a right-shift in a narrower mode |
6887 | than the one it came in with. */ | |
6888 | if ((code == LSHIFTRT || code == ASHIFTRT) | |
6889 | && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x))) | |
6890 | op_mode = GET_MODE (x); | |
ef026f91 RS |
6891 | |
6892 | /* Truncate MASK to fit OP_MODE. */ | |
6893 | if (op_mode) | |
6894 | mask &= GET_MODE_MASK (op_mode); | |
6139ff20 RK |
6895 | |
6896 | /* When we have an arithmetic operation, or a shift whose count we | |
50b29dbb | 6897 | do not know, we need to assume that all bits up to the highest-order |
6139ff20 | 6898 | bit in MASK will be needed. This is how we form such a mask. */ |
50b29dbb ILT |
6899 | if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))) |
6900 | fuller_mask = ~(unsigned HOST_WIDE_INT) 0; | |
ef026f91 | 6901 | else |
50b29dbb ILT |
6902 | fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) |
6903 | - 1); | |
ef026f91 RS |
6904 | |
6905 | /* Determine what bits of X are guaranteed to be (non)zero. */ | |
6906 | nonzero = nonzero_bits (x, mode); | |
6139ff20 RK |
6907 | |
6908 | /* If none of the bits in X are needed, return a zero. */ | |
e3d616e3 | 6909 | if (! just_select && (nonzero & mask) == 0) |
ccf7aef4 | 6910 | x = const0_rtx; |
dfbe1b2f | 6911 | |
6139ff20 RK |
6912 | /* If X is a CONST_INT, return a new one. Do this here since the |
6913 | test below will fail. */ | |
6914 | if (GET_CODE (x) == CONST_INT) | |
ccf7aef4 RH |
6915 | { |
6916 | if (SCALAR_INT_MODE_P (mode)) | |
6917 | return gen_int_mode (INTVAL (x) & mask, mode); | |
6918 | else | |
6919 | { | |
6920 | x = GEN_INT (INTVAL (x) & mask); | |
6921 | return gen_lowpart_common (mode, x); | |
6922 | } | |
6923 | } | |
dfbe1b2f | 6924 | |
180b8e4b RK |
6925 | /* If X is narrower than MODE and we want all the bits in X's mode, just |
6926 | get X in the proper mode. */ | |
6927 | if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode) | |
663522cb | 6928 | && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0) |
4de249d9 | 6929 | return gen_lowpart (mode, x); |
dfbe1b2f RK |
6930 | |
6931 | switch (code) | |
6932 | { | |
6139ff20 RK |
6933 | case CLOBBER: |
6934 | /* If X is a (clobber (const_int)), return it since we know we are | |
0f41302f | 6935 | generating something that won't match. */ |
6139ff20 RK |
6936 | return x; |
6937 | ||
6139ff20 RK |
6938 | case USE: |
6939 | /* X is a (use (mem ..)) that was made from a bit-field extraction that | |
6940 | spanned the boundary of the MEM. If we are now masking so it is | |
6941 | within that boundary, we don't need the USE any more. */ | |
f76b9db2 | 6942 | if (! BITS_BIG_ENDIAN |
663522cb | 6943 | && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
e3d616e3 | 6944 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
f76b9db2 | 6945 | break; |
6139ff20 | 6946 | |
dfbe1b2f RK |
6947 | case SIGN_EXTEND: |
6948 | case ZERO_EXTEND: | |
6949 | case ZERO_EXTRACT: | |
6950 | case SIGN_EXTRACT: | |
6951 | x = expand_compound_operation (x); | |
6952 | if (GET_CODE (x) != code) | |
e3d616e3 | 6953 | return force_to_mode (x, mode, mask, reg, next_select); |
dfbe1b2f RK |
6954 | break; |
6955 | ||
6956 | case REG: | |
6957 | if (reg != 0 && (rtx_equal_p (get_last_value (reg), x) | |
6958 | || rtx_equal_p (reg, get_last_value (x)))) | |
6959 | x = reg; | |
6960 | break; | |
6961 | ||
dfbe1b2f | 6962 | case SUBREG: |
6139ff20 | 6963 | if (subreg_lowpart_p (x) |
180b8e4b RK |
6964 | /* We can ignore the effect of this SUBREG if it narrows the mode or |
6965 | if the constant masks to zero all the bits the mode doesn't | |
6966 | have. */ | |
6139ff20 RK |
6967 | && ((GET_MODE_SIZE (GET_MODE (x)) |
6968 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
6139ff20 RK |
6969 | || (0 == (mask |
6970 | & GET_MODE_MASK (GET_MODE (x)) | |
663522cb | 6971 | & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))))))) |
e3d616e3 | 6972 | return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select); |
dfbe1b2f RK |
6973 | break; |
6974 | ||
6975 | case AND: | |
6139ff20 RK |
6976 | /* If this is an AND with a constant, convert it into an AND |
6977 | whose constant is the AND of that constant with MASK. If it | |
6978 | remains an AND of MASK, delete it since it is redundant. */ | |
dfbe1b2f | 6979 | |
2ca9ae17 | 6980 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) |
dfbe1b2f | 6981 | { |
6139ff20 RK |
6982 | x = simplify_and_const_int (x, op_mode, XEXP (x, 0), |
6983 | mask & INTVAL (XEXP (x, 1))); | |
dfbe1b2f RK |
6984 | |
6985 | /* If X is still an AND, see if it is an AND with a mask that | |
71923da7 RK |
6986 | is just some low-order bits. If so, and it is MASK, we don't |
6987 | need it. */ | |
dfbe1b2f RK |
6988 | |
6989 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | |
d0c9db30 | 6990 | && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x))) |
3129af4c | 6991 | == mask)) |
dfbe1b2f | 6992 | x = XEXP (x, 0); |
d0ab8cd3 | 6993 | |
71923da7 RK |
6994 | /* If it remains an AND, try making another AND with the bits |
6995 | in the mode mask that aren't in MASK turned on. If the | |
6996 | constant in the AND is wide enough, this might make a | |
6997 | cheaper constant. */ | |
6998 | ||
6999 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | |
2ca9ae17 JW |
7000 | && GET_MODE_MASK (GET_MODE (x)) != mask |
7001 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT) | |
71923da7 RK |
7002 | { |
7003 | HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1)) | |
663522cb | 7004 | | (GET_MODE_MASK (GET_MODE (x)) & ~mask)); |
71923da7 RK |
7005 | int width = GET_MODE_BITSIZE (GET_MODE (x)); |
7006 | rtx y; | |
7007 | ||
71cc389b | 7008 | /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative |
71923da7 RK |
7009 | number, sign extend it. */ |
7010 | if (width > 0 && width < HOST_BITS_PER_WIDE_INT | |
7011 | && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
7012 | cval |= (HOST_WIDE_INT) -1 << width; | |
7013 | ||
1999435c | 7014 | y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval)); |
71923da7 RK |
7015 | if (rtx_cost (y, SET) < rtx_cost (x, SET)) |
7016 | x = y; | |
7017 | } | |
7018 | ||
d0ab8cd3 | 7019 | break; |
dfbe1b2f RK |
7020 | } |
7021 | ||
6139ff20 | 7022 | goto binop; |
dfbe1b2f RK |
7023 | |
7024 | case PLUS: | |
6139ff20 RK |
7025 | /* In (and (plus FOO C1) M), if M is a mask that just turns off |
7026 | low-order bits (as in an alignment operation) and FOO is already | |
7027 | aligned to that boundary, mask C1 to that boundary as well. | |
7028 | This may eliminate that PLUS and, later, the AND. */ | |
9fa6d012 TG |
7029 | |
7030 | { | |
770ae6cc | 7031 | unsigned int width = GET_MODE_BITSIZE (mode); |
9fa6d012 TG |
7032 | unsigned HOST_WIDE_INT smask = mask; |
7033 | ||
7034 | /* If MODE is narrower than HOST_WIDE_INT and mask is a negative | |
7035 | number, sign extend it. */ | |
7036 | ||
7037 | if (width < HOST_BITS_PER_WIDE_INT | |
7038 | && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
7039 | smask |= (HOST_WIDE_INT) -1 << width; | |
7040 | ||
7041 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
563c12b0 RH |
7042 | && exact_log2 (- smask) >= 0 |
7043 | && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0 | |
7044 | && (INTVAL (XEXP (x, 1)) & ~smask) != 0) | |
7045 | return force_to_mode (plus_constant (XEXP (x, 0), | |
7046 | (INTVAL (XEXP (x, 1)) & smask)), | |
7047 | mode, smask, reg, next_select); | |
9fa6d012 | 7048 | } |
6139ff20 | 7049 | |
0f41302f | 7050 | /* ... fall through ... */ |
6139ff20 | 7051 | |
dfbe1b2f | 7052 | case MULT: |
6139ff20 RK |
7053 | /* For PLUS, MINUS and MULT, we need any bits less significant than the |
7054 | most significant bit in MASK since carries from those bits will | |
7055 | affect the bits we are interested in. */ | |
7056 | mask = fuller_mask; | |
7057 | goto binop; | |
7058 | ||
d41638e4 RH |
7059 | case MINUS: |
7060 | /* If X is (minus C Y) where C's least set bit is larger than any bit | |
7061 | in the mask, then we may replace with (neg Y). */ | |
7062 | if (GET_CODE (XEXP (x, 0)) == CONST_INT | |
0345195a RK |
7063 | && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0)) |
7064 | & -INTVAL (XEXP (x, 0)))) | |
7065 | > mask)) | |
d41638e4 | 7066 | { |
f1c6ba8b RK |
7067 | x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1), |
7068 | GET_MODE (x)); | |
d41638e4 RH |
7069 | return force_to_mode (x, mode, mask, reg, next_select); |
7070 | } | |
7071 | ||
bc02f8d3 | 7072 | /* Similarly, if C contains every bit in the fuller_mask, then we may |
d41638e4 RH |
7073 | replace with (not Y). */ |
7074 | if (GET_CODE (XEXP (x, 0)) == CONST_INT | |
bc02f8d3 | 7075 | && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask) |
0345195a | 7076 | == INTVAL (XEXP (x, 0)))) |
d41638e4 | 7077 | { |
f1c6ba8b RK |
7078 | x = simplify_gen_unary (NOT, GET_MODE (x), |
7079 | XEXP (x, 1), GET_MODE (x)); | |
d41638e4 RH |
7080 | return force_to_mode (x, mode, mask, reg, next_select); |
7081 | } | |
7082 | ||
7083 | mask = fuller_mask; | |
7084 | goto binop; | |
7085 | ||
dfbe1b2f RK |
7086 | case IOR: |
7087 | case XOR: | |
6139ff20 RK |
7088 | /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and |
7089 | LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...) | |
7090 | operation which may be a bitfield extraction. Ensure that the | |
7091 | constant we form is not wider than the mode of X. */ | |
7092 | ||
7093 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
7094 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
7095 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
7096 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT | |
7097 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
7098 | && ((INTVAL (XEXP (XEXP (x, 0), 1)) | |
7099 | + floor_log2 (INTVAL (XEXP (x, 1)))) | |
7100 | < GET_MODE_BITSIZE (GET_MODE (x))) | |
7101 | && (INTVAL (XEXP (x, 1)) | |
663522cb | 7102 | & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) |
6139ff20 RK |
7103 | { |
7104 | temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask) | |
663522cb | 7105 | << INTVAL (XEXP (XEXP (x, 0), 1))); |
1999435c PB |
7106 | temp = gen_binary (GET_CODE (x), GET_MODE (x), |
7107 | XEXP (XEXP (x, 0), 0), temp); | |
7108 | x = gen_binary (LSHIFTRT, GET_MODE (x), temp, | |
7109 | XEXP (XEXP (x, 0), 1)); | |
e3d616e3 | 7110 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
7111 | } |
7112 | ||
7113 | binop: | |
dfbe1b2f | 7114 | /* For most binary operations, just propagate into the operation and |
6d2f8887 | 7115 | change the mode if we have an operation of that mode. */ |
6139ff20 | 7116 | |
4de249d9 PB |
7117 | op0 = gen_lowpart (op_mode, |
7118 | force_to_mode (XEXP (x, 0), mode, mask, | |
7119 | reg, next_select)); | |
7120 | op1 = gen_lowpart (op_mode, | |
7121 | force_to_mode (XEXP (x, 1), mode, mask, | |
7122 | reg, next_select)); | |
6139ff20 RK |
7123 | |
7124 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1)) | |
1999435c | 7125 | x = gen_binary (code, op_mode, op0, op1); |
d0ab8cd3 | 7126 | break; |
dfbe1b2f RK |
7127 | |
7128 | case ASHIFT: | |
dfbe1b2f | 7129 | /* For left shifts, do the same, but just for the first operand. |
f6785026 RK |
7130 | However, we cannot do anything with shifts where we cannot |
7131 | guarantee that the counts are smaller than the size of the mode | |
7132 | because such a count will have a different meaning in a | |
6139ff20 | 7133 | wider mode. */ |
f6785026 RK |
7134 | |
7135 | if (! (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6139ff20 | 7136 | && INTVAL (XEXP (x, 1)) >= 0 |
f6785026 RK |
7137 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)) |
7138 | && ! (GET_MODE (XEXP (x, 1)) != VOIDmode | |
7139 | && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1))) | |
adb7a1cb | 7140 | < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode)))) |
f6785026 | 7141 | break; |
663522cb | 7142 | |
6139ff20 RK |
7143 | /* If the shift count is a constant and we can do arithmetic in |
7144 | the mode of the shift, refine which bits we need. Otherwise, use the | |
7145 | conservative form of the mask. */ | |
7146 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
7147 | && INTVAL (XEXP (x, 1)) >= 0 | |
7148 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode) | |
7149 | && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) | |
7150 | mask >>= INTVAL (XEXP (x, 1)); | |
7151 | else | |
7152 | mask = fuller_mask; | |
7153 | ||
4de249d9 PB |
7154 | op0 = gen_lowpart (op_mode, |
7155 | force_to_mode (XEXP (x, 0), op_mode, | |
7156 | mask, reg, next_select)); | |
6139ff20 RK |
7157 | |
7158 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) | |
1999435c | 7159 | x = gen_binary (code, op_mode, op0, XEXP (x, 1)); |
d0ab8cd3 | 7160 | break; |
dfbe1b2f RK |
7161 | |
7162 | case LSHIFTRT: | |
1347292b JW |
7163 | /* Here we can only do something if the shift count is a constant, |
7164 | this shift constant is valid for the host, and we can do arithmetic | |
7165 | in OP_MODE. */ | |
dfbe1b2f RK |
7166 | |
7167 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
1347292b | 7168 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
6139ff20 | 7169 | && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) |
d0ab8cd3 | 7170 | { |
6139ff20 | 7171 | rtx inner = XEXP (x, 0); |
402b6c2a | 7172 | unsigned HOST_WIDE_INT inner_mask; |
6139ff20 RK |
7173 | |
7174 | /* Select the mask of the bits we need for the shift operand. */ | |
402b6c2a | 7175 | inner_mask = mask << INTVAL (XEXP (x, 1)); |
d0ab8cd3 | 7176 | |
6139ff20 | 7177 | /* We can only change the mode of the shift if we can do arithmetic |
402b6c2a | 7178 | in the mode of the shift and INNER_MASK is no wider than the |
f3b2657c JJ |
7179 | width of X's mode. */ |
7180 | if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0) | |
d0ab8cd3 RK |
7181 | op_mode = GET_MODE (x); |
7182 | ||
402b6c2a | 7183 | inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select); |
6139ff20 RK |
7184 | |
7185 | if (GET_MODE (x) != op_mode || inner != XEXP (x, 0)) | |
1999435c | 7186 | x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1)); |
d0ab8cd3 | 7187 | } |
6139ff20 RK |
7188 | |
7189 | /* If we have (and (lshiftrt FOO C1) C2) where the combination of the | |
7190 | shift and AND produces only copies of the sign bit (C2 is one less | |
7191 | than a power of two), we can do this with just a shift. */ | |
7192 | ||
7193 | if (GET_CODE (x) == LSHIFTRT | |
7194 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
cfff35c1 JW |
7195 | /* The shift puts one of the sign bit copies in the least significant |
7196 | bit. */ | |
6139ff20 RK |
7197 | && ((INTVAL (XEXP (x, 1)) |
7198 | + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))) | |
7199 | >= GET_MODE_BITSIZE (GET_MODE (x))) | |
7200 | && exact_log2 (mask + 1) >= 0 | |
cfff35c1 JW |
7201 | /* Number of bits left after the shift must be more than the mask |
7202 | needs. */ | |
7203 | && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1)) | |
7204 | <= GET_MODE_BITSIZE (GET_MODE (x))) | |
7205 | /* Must be more sign bit copies than the mask needs. */ | |
770ae6cc | 7206 | && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) |
6139ff20 | 7207 | >= exact_log2 (mask + 1))) |
1999435c PB |
7208 | x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), |
7209 | GEN_INT (GET_MODE_BITSIZE (GET_MODE (x)) | |
7210 | - exact_log2 (mask + 1))); | |
fae2db47 JW |
7211 | |
7212 | goto shiftrt; | |
d0ab8cd3 RK |
7213 | |
7214 | case ASHIFTRT: | |
6139ff20 RK |
7215 | /* If we are just looking for the sign bit, we don't need this shift at |
7216 | all, even if it has a variable count. */ | |
9bf22b75 | 7217 | if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT |
e51712db | 7218 | && (mask == ((unsigned HOST_WIDE_INT) 1 |
9bf22b75 | 7219 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
e3d616e3 | 7220 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
6139ff20 RK |
7221 | |
7222 | /* If this is a shift by a constant, get a mask that contains those bits | |
7223 | that are not copies of the sign bit. We then have two cases: If | |
7224 | MASK only includes those bits, this can be a logical shift, which may | |
7225 | allow simplifications. If MASK is a single-bit field not within | |
7226 | those bits, we are requesting a copy of the sign bit and hence can | |
7227 | shift the sign bit to the appropriate location. */ | |
7228 | ||
7229 | if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0 | |
7230 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) | |
7231 | { | |
7232 | int i = -1; | |
7233 | ||
3e92902c | 7234 | /* If the considered data is wider than HOST_WIDE_INT, we can't |
b69960ac RK |
7235 | represent a mask for all its bits in a single scalar. |
7236 | But we only care about the lower bits, so calculate these. */ | |
7237 | ||
6a11342f | 7238 | if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT) |
b69960ac | 7239 | { |
663522cb | 7240 | nonzero = ~(HOST_WIDE_INT) 0; |
b69960ac RK |
7241 | |
7242 | /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) | |
7243 | is the number of bits a full-width mask would have set. | |
7244 | We need only shift if these are fewer than nonzero can | |
7245 | hold. If not, we must keep all bits set in nonzero. */ | |
7246 | ||
7247 | if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) | |
7248 | < HOST_BITS_PER_WIDE_INT) | |
7249 | nonzero >>= INTVAL (XEXP (x, 1)) | |
7250 | + HOST_BITS_PER_WIDE_INT | |
7251 | - GET_MODE_BITSIZE (GET_MODE (x)) ; | |
7252 | } | |
7253 | else | |
7254 | { | |
7255 | nonzero = GET_MODE_MASK (GET_MODE (x)); | |
7256 | nonzero >>= INTVAL (XEXP (x, 1)); | |
7257 | } | |
6139ff20 | 7258 | |
663522cb | 7259 | if ((mask & ~nonzero) == 0 |
6139ff20 RK |
7260 | || (i = exact_log2 (mask)) >= 0) |
7261 | { | |
7262 | x = simplify_shift_const | |
7263 | (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0), | |
7264 | i < 0 ? INTVAL (XEXP (x, 1)) | |
7265 | : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i); | |
7266 | ||
7267 | if (GET_CODE (x) != ASHIFTRT) | |
e3d616e3 | 7268 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
7269 | } |
7270 | } | |
7271 | ||
e0a2f705 | 7272 | /* If MASK is 1, convert this to an LSHIFTRT. This can be done |
6139ff20 RK |
7273 | even if the shift count isn't a constant. */ |
7274 | if (mask == 1) | |
1999435c | 7275 | x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)); |
6139ff20 | 7276 | |
fae2db47 JW |
7277 | shiftrt: |
7278 | ||
7279 | /* If this is a zero- or sign-extension operation that just affects bits | |
4c002f29 RK |
7280 | we don't care about, remove it. Be sure the call above returned |
7281 | something that is still a shift. */ | |
d0ab8cd3 | 7282 | |
4c002f29 RK |
7283 | if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT) |
7284 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
d0ab8cd3 | 7285 | && INTVAL (XEXP (x, 1)) >= 0 |
6139ff20 RK |
7286 | && (INTVAL (XEXP (x, 1)) |
7287 | <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1)) | |
d0ab8cd3 | 7288 | && GET_CODE (XEXP (x, 0)) == ASHIFT |
fa9ea255 | 7289 | && XEXP (XEXP (x, 0), 1) == XEXP (x, 1)) |
e3d616e3 RK |
7290 | return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, |
7291 | reg, next_select); | |
6139ff20 | 7292 | |
dfbe1b2f RK |
7293 | break; |
7294 | ||
6139ff20 RK |
7295 | case ROTATE: |
7296 | case ROTATERT: | |
7297 | /* If the shift count is constant and we can do computations | |
7298 | in the mode of X, compute where the bits we care about are. | |
7299 | Otherwise, we can't do anything. Don't change the mode of | |
7300 | the shift or propagate MODE into the shift, though. */ | |
7301 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
7302 | && INTVAL (XEXP (x, 1)) >= 0) | |
7303 | { | |
7304 | temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE, | |
7305 | GET_MODE (x), GEN_INT (mask), | |
7306 | XEXP (x, 1)); | |
e869aa39 | 7307 | if (temp && GET_CODE (temp) == CONST_INT) |
6139ff20 RK |
7308 | SUBST (XEXP (x, 0), |
7309 | force_to_mode (XEXP (x, 0), GET_MODE (x), | |
e3d616e3 | 7310 | INTVAL (temp), reg, next_select)); |
6139ff20 RK |
7311 | } |
7312 | break; | |
663522cb | 7313 | |
dfbe1b2f | 7314 | case NEG: |
180b8e4b | 7315 | /* If we just want the low-order bit, the NEG isn't needed since it |
3ef42a0c | 7316 | won't change the low-order bit. */ |
180b8e4b RK |
7317 | if (mask == 1) |
7318 | return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select); | |
7319 | ||
6139ff20 RK |
7320 | /* We need any bits less significant than the most significant bit in |
7321 | MASK since carries from those bits will affect the bits we are | |
7322 | interested in. */ | |
7323 | mask = fuller_mask; | |
7324 | goto unop; | |
7325 | ||
dfbe1b2f | 7326 | case NOT: |
6139ff20 RK |
7327 | /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the |
7328 | same as the XOR case above. Ensure that the constant we form is not | |
7329 | wider than the mode of X. */ | |
7330 | ||
7331 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
7332 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
7333 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
7334 | && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask) | |
7335 | < GET_MODE_BITSIZE (GET_MODE (x))) | |
7336 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT) | |
7337 | { | |
6a04f4e0 AM |
7338 | temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)), |
7339 | GET_MODE (x)); | |
1999435c PB |
7340 | temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp); |
7341 | x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1)); | |
6139ff20 | 7342 | |
e3d616e3 | 7343 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
7344 | } |
7345 | ||
f82da7d2 JW |
7346 | /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must |
7347 | use the full mask inside the NOT. */ | |
7348 | mask = fuller_mask; | |
7349 | ||
6139ff20 | 7350 | unop: |
4de249d9 PB |
7351 | op0 = gen_lowpart (op_mode, |
7352 | force_to_mode (XEXP (x, 0), mode, mask, | |
7353 | reg, next_select)); | |
6139ff20 | 7354 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) |
f1c6ba8b | 7355 | x = simplify_gen_unary (code, op_mode, op0, op_mode); |
6139ff20 RK |
7356 | break; |
7357 | ||
7358 | case NE: | |
7359 | /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included | |
3aceff0d | 7360 | in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero, |
1a6ec070 | 7361 | which is equal to STORE_FLAG_VALUE. */ |
663522cb | 7362 | if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx |
7d103eb5 | 7363 | && GET_MODE (XEXP (x, 0)) == mode |
3aceff0d | 7364 | && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0 |
43196589 AS |
7365 | && (nonzero_bits (XEXP (x, 0), mode) |
7366 | == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE)) | |
e3d616e3 | 7367 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
6139ff20 | 7368 | |
d0ab8cd3 RK |
7369 | break; |
7370 | ||
7371 | case IF_THEN_ELSE: | |
7372 | /* We have no way of knowing if the IF_THEN_ELSE can itself be | |
7373 | written in a narrower mode. We play it safe and do not do so. */ | |
7374 | ||
7375 | SUBST (XEXP (x, 1), | |
4de249d9 | 7376 | gen_lowpart (GET_MODE (x), |
d0ab8cd3 | 7377 | force_to_mode (XEXP (x, 1), mode, |
e3d616e3 | 7378 | mask, reg, next_select))); |
d0ab8cd3 | 7379 | SUBST (XEXP (x, 2), |
4de249d9 | 7380 | gen_lowpart (GET_MODE (x), |
d0ab8cd3 | 7381 | force_to_mode (XEXP (x, 2), mode, |
e869aa39 | 7382 | mask, reg, next_select))); |
d0ab8cd3 | 7383 | break; |
663522cb | 7384 | |
e9a25f70 JL |
7385 | default: |
7386 | break; | |
dfbe1b2f RK |
7387 | } |
7388 | ||
d0ab8cd3 | 7389 | /* Ensure we return a value of the proper mode. */ |
4de249d9 | 7390 | return gen_lowpart (mode, x); |
dfbe1b2f RK |
7391 | } |
7392 | \f | |
abe6e52f RK |
7393 | /* Return nonzero if X is an expression that has one of two values depending on |
7394 | whether some other value is zero or nonzero. In that case, we return the | |
7395 | value that is being tested, *PTRUE is set to the value if the rtx being | |
7396 | returned has a nonzero value, and *PFALSE is set to the other alternative. | |
7397 | ||
7398 | If we return zero, we set *PTRUE and *PFALSE to X. */ | |
7399 | ||
7400 | static rtx | |
79a490a9 | 7401 | if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse) |
abe6e52f RK |
7402 | { |
7403 | enum machine_mode mode = GET_MODE (x); | |
7404 | enum rtx_code code = GET_CODE (x); | |
abe6e52f RK |
7405 | rtx cond0, cond1, true0, true1, false0, false1; |
7406 | unsigned HOST_WIDE_INT nz; | |
7407 | ||
14a774a9 RK |
7408 | /* If we are comparing a value against zero, we are done. */ |
7409 | if ((code == NE || code == EQ) | |
87d9741e | 7410 | && XEXP (x, 1) == const0_rtx) |
14a774a9 | 7411 | { |
e8758a3a JL |
7412 | *ptrue = (code == NE) ? const_true_rtx : const0_rtx; |
7413 | *pfalse = (code == NE) ? const0_rtx : const_true_rtx; | |
14a774a9 RK |
7414 | return XEXP (x, 0); |
7415 | } | |
7416 | ||
abe6e52f RK |
7417 | /* If this is a unary operation whose operand has one of two values, apply |
7418 | our opcode to compute those values. */ | |
ec8e098d | 7419 | else if (UNARY_P (x) |
14a774a9 | 7420 | && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0) |
abe6e52f | 7421 | { |
f1c6ba8b RK |
7422 | *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0))); |
7423 | *pfalse = simplify_gen_unary (code, mode, false0, | |
7424 | GET_MODE (XEXP (x, 0))); | |
abe6e52f RK |
7425 | return cond0; |
7426 | } | |
7427 | ||
3a19aabc | 7428 | /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would |
ddd5a7c1 | 7429 | make can't possibly match and would suppress other optimizations. */ |
3a19aabc RK |
7430 | else if (code == COMPARE) |
7431 | ; | |
7432 | ||
abe6e52f RK |
7433 | /* If this is a binary operation, see if either side has only one of two |
7434 | values. If either one does or if both do and they are conditional on | |
7435 | the same value, compute the new true and false values. */ | |
ec8e098d | 7436 | else if (BINARY_P (x)) |
abe6e52f RK |
7437 | { |
7438 | cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0); | |
7439 | cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1); | |
7440 | ||
7441 | if ((cond0 != 0 || cond1 != 0) | |
7442 | && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1))) | |
7443 | { | |
987e845a JW |
7444 | /* If if_then_else_cond returned zero, then true/false are the |
7445 | same rtl. We must copy one of them to prevent invalid rtl | |
7446 | sharing. */ | |
7447 | if (cond0 == 0) | |
7448 | true0 = copy_rtx (true0); | |
7449 | else if (cond1 == 0) | |
7450 | true1 = copy_rtx (true1); | |
7451 | ||
1999435c PB |
7452 | *ptrue = gen_binary (code, mode, true0, true1); |
7453 | *pfalse = gen_binary (code, mode, false0, false1); | |
abe6e52f RK |
7454 | return cond0 ? cond0 : cond1; |
7455 | } | |
9210df58 | 7456 | |
9210df58 | 7457 | /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the |
da7d8304 | 7458 | operands is zero when the other is nonzero, and vice-versa, |
0802d516 | 7459 | and STORE_FLAG_VALUE is 1 or -1. */ |
9210df58 | 7460 | |
0802d516 RK |
7461 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
7462 | && (code == PLUS || code == IOR || code == XOR || code == MINUS | |
663522cb | 7463 | || code == UMAX) |
9210df58 RK |
7464 | && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT) |
7465 | { | |
7466 | rtx op0 = XEXP (XEXP (x, 0), 1); | |
7467 | rtx op1 = XEXP (XEXP (x, 1), 1); | |
7468 | ||
7469 | cond0 = XEXP (XEXP (x, 0), 0); | |
7470 | cond1 = XEXP (XEXP (x, 1), 0); | |
7471 | ||
ec8e098d PB |
7472 | if (COMPARISON_P (cond0) |
7473 | && COMPARISON_P (cond1) | |
9a915772 | 7474 | && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1) |
9210df58 RK |
7475 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0)) |
7476 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1))) | |
7477 | || ((swap_condition (GET_CODE (cond0)) | |
9a915772 | 7478 | == combine_reversed_comparison_code (cond1)) |
9210df58 RK |
7479 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1)) |
7480 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0)))) | |
7481 | && ! side_effects_p (x)) | |
7482 | { | |
1999435c PB |
7483 | *ptrue = gen_binary (MULT, mode, op0, const_true_rtx); |
7484 | *pfalse = gen_binary (MULT, mode, | |
7485 | (code == MINUS | |
7486 | ? simplify_gen_unary (NEG, mode, op1, | |
7487 | mode) | |
7488 | : op1), | |
7489 | const_true_rtx); | |
9210df58 RK |
7490 | return cond0; |
7491 | } | |
7492 | } | |
7493 | ||
eaec9b3d | 7494 | /* Similarly for MULT, AND and UMIN, except that for these the result |
9210df58 | 7495 | is always zero. */ |
0802d516 RK |
7496 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
7497 | && (code == MULT || code == AND || code == UMIN) | |
9210df58 RK |
7498 | && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT) |
7499 | { | |
7500 | cond0 = XEXP (XEXP (x, 0), 0); | |
7501 | cond1 = XEXP (XEXP (x, 1), 0); | |
7502 | ||
ec8e098d PB |
7503 | if (COMPARISON_P (cond0) |
7504 | && COMPARISON_P (cond1) | |
9a915772 | 7505 | && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1) |
9210df58 RK |
7506 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0)) |
7507 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1))) | |
7508 | || ((swap_condition (GET_CODE (cond0)) | |
9a915772 | 7509 | == combine_reversed_comparison_code (cond1)) |
9210df58 RK |
7510 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1)) |
7511 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0)))) | |
7512 | && ! side_effects_p (x)) | |
7513 | { | |
7514 | *ptrue = *pfalse = const0_rtx; | |
7515 | return cond0; | |
7516 | } | |
7517 | } | |
abe6e52f RK |
7518 | } |
7519 | ||
7520 | else if (code == IF_THEN_ELSE) | |
7521 | { | |
7522 | /* If we have IF_THEN_ELSE already, extract the condition and | |
7523 | canonicalize it if it is NE or EQ. */ | |
7524 | cond0 = XEXP (x, 0); | |
7525 | *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2); | |
7526 | if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx) | |
7527 | return XEXP (cond0, 0); | |
7528 | else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx) | |
7529 | { | |
7530 | *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1); | |
7531 | return XEXP (cond0, 0); | |
7532 | } | |
7533 | else | |
7534 | return cond0; | |
7535 | } | |
7536 | ||
0631e0bf JH |
7537 | /* If X is a SUBREG, we can narrow both the true and false values |
7538 | if the inner expression, if there is a condition. */ | |
7539 | else if (code == SUBREG | |
abe6e52f RK |
7540 | && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x), |
7541 | &true0, &false0))) | |
7542 | { | |
bbe708a3 UW |
7543 | true0 = simplify_gen_subreg (mode, true0, |
7544 | GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x)); | |
7545 | false0 = simplify_gen_subreg (mode, false0, | |
0631e0bf | 7546 | GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x)); |
bbe708a3 UW |
7547 | if (true0 && false0) |
7548 | { | |
7549 | *ptrue = true0; | |
7550 | *pfalse = false0; | |
7551 | return cond0; | |
7552 | } | |
abe6e52f RK |
7553 | } |
7554 | ||
7555 | /* If X is a constant, this isn't special and will cause confusions | |
7556 | if we treat it as such. Likewise if it is equivalent to a constant. */ | |
7557 | else if (CONSTANT_P (x) | |
7558 | || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0))) | |
7559 | ; | |
7560 | ||
1f3f36d1 RH |
7561 | /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that |
7562 | will be least confusing to the rest of the compiler. */ | |
7563 | else if (mode == BImode) | |
7564 | { | |
7565 | *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx; | |
7566 | return x; | |
7567 | } | |
7568 | ||
663522cb | 7569 | /* If X is known to be either 0 or -1, those are the true and |
abe6e52f | 7570 | false values when testing X. */ |
49219895 JH |
7571 | else if (x == constm1_rtx || x == const0_rtx |
7572 | || (mode != VOIDmode | |
7573 | && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode))) | |
abe6e52f RK |
7574 | { |
7575 | *ptrue = constm1_rtx, *pfalse = const0_rtx; | |
7576 | return x; | |
7577 | } | |
7578 | ||
7579 | /* Likewise for 0 or a single bit. */ | |
9eb54558 | 7580 | else if (SCALAR_INT_MODE_P (mode) |
49219895 JH |
7581 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
7582 | && exact_log2 (nz = nonzero_bits (x, mode)) >= 0) | |
abe6e52f | 7583 | { |
578fc63d | 7584 | *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx; |
abe6e52f RK |
7585 | return x; |
7586 | } | |
7587 | ||
7588 | /* Otherwise fail; show no condition with true and false values the same. */ | |
7589 | *ptrue = *pfalse = x; | |
7590 | return 0; | |
7591 | } | |
7592 | \f | |
1a26b032 RK |
7593 | /* Return the value of expression X given the fact that condition COND |
7594 | is known to be true when applied to REG as its first operand and VAL | |
7595 | as its second. X is known to not be shared and so can be modified in | |
7596 | place. | |
7597 | ||
7598 | We only handle the simplest cases, and specifically those cases that | |
7599 | arise with IF_THEN_ELSE expressions. */ | |
7600 | ||
7601 | static rtx | |
79a490a9 | 7602 | known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val) |
1a26b032 RK |
7603 | { |
7604 | enum rtx_code code = GET_CODE (x); | |
f24ad0e4 | 7605 | rtx temp; |
6f7d635c | 7606 | const char *fmt; |
1a26b032 RK |
7607 | int i, j; |
7608 | ||
7609 | if (side_effects_p (x)) | |
7610 | return x; | |
7611 | ||
805f1694 JL |
7612 | /* If either operand of the condition is a floating point value, |
7613 | then we have to avoid collapsing an EQ comparison. */ | |
7614 | if (cond == EQ | |
7615 | && rtx_equal_p (x, reg) | |
7616 | && ! FLOAT_MODE_P (GET_MODE (x)) | |
7617 | && ! FLOAT_MODE_P (GET_MODE (val))) | |
69bc0a1f | 7618 | return val; |
805f1694 | 7619 | |
69bc0a1f | 7620 | if (cond == UNEQ && rtx_equal_p (x, reg)) |
1a26b032 RK |
7621 | return val; |
7622 | ||
7623 | /* If X is (abs REG) and we know something about REG's relationship | |
7624 | with zero, we may be able to simplify this. */ | |
7625 | ||
7626 | if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx) | |
7627 | switch (cond) | |
7628 | { | |
7629 | case GE: case GT: case EQ: | |
7630 | return XEXP (x, 0); | |
7631 | case LT: case LE: | |
f1c6ba8b RK |
7632 | return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)), |
7633 | XEXP (x, 0), | |
7634 | GET_MODE (XEXP (x, 0))); | |
e9a25f70 JL |
7635 | default: |
7636 | break; | |
1a26b032 RK |
7637 | } |
7638 | ||
7639 | /* The only other cases we handle are MIN, MAX, and comparisons if the | |
7640 | operands are the same as REG and VAL. */ | |
7641 | ||
ec8e098d | 7642 | else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x)) |
1a26b032 RK |
7643 | { |
7644 | if (rtx_equal_p (XEXP (x, 0), val)) | |
7645 | cond = swap_condition (cond), temp = val, val = reg, reg = temp; | |
7646 | ||
7647 | if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val)) | |
7648 | { | |
ec8e098d | 7649 | if (COMPARISON_P (x)) |
1eb8759b RH |
7650 | { |
7651 | if (comparison_dominates_p (cond, code)) | |
7652 | return const_true_rtx; | |
1a26b032 | 7653 | |
9a915772 | 7654 | code = combine_reversed_comparison_code (x); |
1eb8759b RH |
7655 | if (code != UNKNOWN |
7656 | && comparison_dominates_p (cond, code)) | |
7657 | return const0_rtx; | |
7658 | else | |
7659 | return x; | |
7660 | } | |
1a26b032 RK |
7661 | else if (code == SMAX || code == SMIN |
7662 | || code == UMIN || code == UMAX) | |
7663 | { | |
7664 | int unsignedp = (code == UMIN || code == UMAX); | |
7665 | ||
ac4cdf40 JE |
7666 | /* Do not reverse the condition when it is NE or EQ. |
7667 | This is because we cannot conclude anything about | |
7668 | the value of 'SMAX (x, y)' when x is not equal to y, | |
23190837 | 7669 | but we can when x equals y. */ |
ac4cdf40 JE |
7670 | if ((code == SMAX || code == UMAX) |
7671 | && ! (cond == EQ || cond == NE)) | |
1a26b032 RK |
7672 | cond = reverse_condition (cond); |
7673 | ||
7674 | switch (cond) | |
7675 | { | |
7676 | case GE: case GT: | |
7677 | return unsignedp ? x : XEXP (x, 1); | |
7678 | case LE: case LT: | |
7679 | return unsignedp ? x : XEXP (x, 0); | |
7680 | case GEU: case GTU: | |
7681 | return unsignedp ? XEXP (x, 1) : x; | |
7682 | case LEU: case LTU: | |
7683 | return unsignedp ? XEXP (x, 0) : x; | |
e9a25f70 JL |
7684 | default: |
7685 | break; | |
1a26b032 RK |
7686 | } |
7687 | } | |
7688 | } | |
7689 | } | |
9a360704 AO |
7690 | else if (code == SUBREG) |
7691 | { | |
7692 | enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x)); | |
7693 | rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val); | |
7694 | ||
7695 | if (SUBREG_REG (x) != r) | |
7696 | { | |
7697 | /* We must simplify subreg here, before we lose track of the | |
7698 | original inner_mode. */ | |
7699 | new = simplify_subreg (GET_MODE (x), r, | |
7700 | inner_mode, SUBREG_BYTE (x)); | |
7701 | if (new) | |
7702 | return new; | |
7703 | else | |
7704 | SUBST (SUBREG_REG (x), r); | |
7705 | } | |
7706 | ||
7707 | return x; | |
7708 | } | |
4161da12 AO |
7709 | /* We don't have to handle SIGN_EXTEND here, because even in the |
7710 | case of replacing something with a modeless CONST_INT, a | |
7711 | CONST_INT is already (supposed to be) a valid sign extension for | |
7712 | its narrower mode, which implies it's already properly | |
7713 | sign-extended for the wider mode. Now, for ZERO_EXTEND, the | |
7714 | story is different. */ | |
7715 | else if (code == ZERO_EXTEND) | |
7716 | { | |
7717 | enum machine_mode inner_mode = GET_MODE (XEXP (x, 0)); | |
7718 | rtx new, r = known_cond (XEXP (x, 0), cond, reg, val); | |
7719 | ||
7720 | if (XEXP (x, 0) != r) | |
7721 | { | |
7722 | /* We must simplify the zero_extend here, before we lose | |
7723 | track of the original inner_mode. */ | |
7724 | new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | |
7725 | r, inner_mode); | |
7726 | if (new) | |
7727 | return new; | |
7728 | else | |
7729 | SUBST (XEXP (x, 0), r); | |
7730 | } | |
7731 | ||
7732 | return x; | |
7733 | } | |
1a26b032 RK |
7734 | |
7735 | fmt = GET_RTX_FORMAT (code); | |
7736 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
7737 | { | |
7738 | if (fmt[i] == 'e') | |
7739 | SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val)); | |
7740 | else if (fmt[i] == 'E') | |
7741 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
7742 | SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j), | |
7743 | cond, reg, val)); | |
7744 | } | |
7745 | ||
7746 | return x; | |
7747 | } | |
7748 | \f | |
e11fa86f RK |
7749 | /* See if X and Y are equal for the purposes of seeing if we can rewrite an |
7750 | assignment as a field assignment. */ | |
7751 | ||
7752 | static int | |
79a490a9 | 7753 | rtx_equal_for_field_assignment_p (rtx x, rtx y) |
e11fa86f | 7754 | { |
e11fa86f RK |
7755 | if (x == y || rtx_equal_p (x, y)) |
7756 | return 1; | |
7757 | ||
7758 | if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y)) | |
7759 | return 0; | |
7760 | ||
7761 | /* Check for a paradoxical SUBREG of a MEM compared with the MEM. | |
7762 | Note that all SUBREGs of MEM are paradoxical; otherwise they | |
7763 | would have been rewritten. */ | |
3c0cb5de JQ |
7764 | if (MEM_P (x) && GET_CODE (y) == SUBREG |
7765 | && MEM_P (SUBREG_REG (y)) | |
e11fa86f | 7766 | && rtx_equal_p (SUBREG_REG (y), |
4de249d9 | 7767 | gen_lowpart (GET_MODE (SUBREG_REG (y)), x))) |
e11fa86f RK |
7768 | return 1; |
7769 | ||
3c0cb5de JQ |
7770 | if (MEM_P (y) && GET_CODE (x) == SUBREG |
7771 | && MEM_P (SUBREG_REG (x)) | |
e11fa86f | 7772 | && rtx_equal_p (SUBREG_REG (x), |
4de249d9 | 7773 | gen_lowpart (GET_MODE (SUBREG_REG (x)), y))) |
e11fa86f RK |
7774 | return 1; |
7775 | ||
9ec36da5 JL |
7776 | /* We used to see if get_last_value of X and Y were the same but that's |
7777 | not correct. In one direction, we'll cause the assignment to have | |
7778 | the wrong destination and in the case, we'll import a register into this | |
7779 | insn that might have already have been dead. So fail if none of the | |
7780 | above cases are true. */ | |
7781 | return 0; | |
e11fa86f RK |
7782 | } |
7783 | \f | |
230d793d RS |
7784 | /* See if X, a SET operation, can be rewritten as a bit-field assignment. |
7785 | Return that assignment if so. | |
7786 | ||
7787 | We only handle the most common cases. */ | |
7788 | ||
7789 | static rtx | |
79a490a9 | 7790 | make_field_assignment (rtx x) |
230d793d RS |
7791 | { |
7792 | rtx dest = SET_DEST (x); | |
7793 | rtx src = SET_SRC (x); | |
dfbe1b2f | 7794 | rtx assign; |
e11fa86f | 7795 | rtx rhs, lhs; |
5f4f0e22 | 7796 | HOST_WIDE_INT c1; |
770ae6cc RK |
7797 | HOST_WIDE_INT pos; |
7798 | unsigned HOST_WIDE_INT len; | |
dfbe1b2f RK |
7799 | rtx other; |
7800 | enum machine_mode mode; | |
230d793d RS |
7801 | |
7802 | /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is | |
7803 | a clear of a one-bit field. We will have changed it to | |
7804 | (and (rotate (const_int -2) POS) DEST), so check for that. Also check | |
7805 | for a SUBREG. */ | |
7806 | ||
7807 | if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE | |
7808 | && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT | |
7809 | && INTVAL (XEXP (XEXP (src, 0), 0)) == -2 | |
e11fa86f | 7810 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 7811 | { |
8999a12e | 7812 | assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), |
230d793d | 7813 | 1, 1, 1, 0); |
76184def | 7814 | if (assign != 0) |
38a448ca | 7815 | return gen_rtx_SET (VOIDmode, assign, const0_rtx); |
76184def | 7816 | return x; |
230d793d RS |
7817 | } |
7818 | ||
7819 | else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG | |
7820 | && subreg_lowpart_p (XEXP (src, 0)) | |
663522cb | 7821 | && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0))) |
230d793d RS |
7822 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0))))) |
7823 | && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE | |
b9dc9cf1 | 7824 | && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT |
230d793d | 7825 | && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2 |
e11fa86f | 7826 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 7827 | { |
8999a12e | 7828 | assign = make_extraction (VOIDmode, dest, 0, |
230d793d RS |
7829 | XEXP (SUBREG_REG (XEXP (src, 0)), 1), |
7830 | 1, 1, 1, 0); | |
76184def | 7831 | if (assign != 0) |
38a448ca | 7832 | return gen_rtx_SET (VOIDmode, assign, const0_rtx); |
76184def | 7833 | return x; |
230d793d RS |
7834 | } |
7835 | ||
9dd11dcb | 7836 | /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a |
230d793d RS |
7837 | one-bit field. */ |
7838 | else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT | |
7839 | && XEXP (XEXP (src, 0), 0) == const1_rtx | |
e11fa86f | 7840 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 7841 | { |
8999a12e | 7842 | assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), |
230d793d | 7843 | 1, 1, 1, 0); |
76184def | 7844 | if (assign != 0) |
38a448ca | 7845 | return gen_rtx_SET (VOIDmode, assign, const1_rtx); |
76184def | 7846 | return x; |
230d793d RS |
7847 | } |
7848 | ||
dfbe1b2f | 7849 | /* The other case we handle is assignments into a constant-position |
9dd11dcb | 7850 | field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents |
dfbe1b2f RK |
7851 | a mask that has all one bits except for a group of zero bits and |
7852 | OTHER is known to have zeros where C1 has ones, this is such an | |
7853 | assignment. Compute the position and length from C1. Shift OTHER | |
7854 | to the appropriate position, force it to the required mode, and | |
7855 | make the extraction. Check for the AND in both operands. */ | |
7856 | ||
9dd11dcb | 7857 | if (GET_CODE (src) != IOR && GET_CODE (src) != XOR) |
e11fa86f RK |
7858 | return x; |
7859 | ||
7860 | rhs = expand_compound_operation (XEXP (src, 0)); | |
7861 | lhs = expand_compound_operation (XEXP (src, 1)); | |
7862 | ||
7863 | if (GET_CODE (rhs) == AND | |
7864 | && GET_CODE (XEXP (rhs, 1)) == CONST_INT | |
7865 | && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest)) | |
7866 | c1 = INTVAL (XEXP (rhs, 1)), other = lhs; | |
7867 | else if (GET_CODE (lhs) == AND | |
7868 | && GET_CODE (XEXP (lhs, 1)) == CONST_INT | |
7869 | && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest)) | |
7870 | c1 = INTVAL (XEXP (lhs, 1)), other = rhs; | |
dfbe1b2f RK |
7871 | else |
7872 | return x; | |
230d793d | 7873 | |
663522cb | 7874 | pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len); |
dfbe1b2f | 7875 | if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest)) |
e5e809f4 JL |
7876 | || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT |
7877 | || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0) | |
dfbe1b2f | 7878 | return x; |
230d793d | 7879 | |
5f4f0e22 | 7880 | assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0); |
76184def DE |
7881 | if (assign == 0) |
7882 | return x; | |
230d793d | 7883 | |
dfbe1b2f RK |
7884 | /* The mode to use for the source is the mode of the assignment, or of |
7885 | what is inside a possible STRICT_LOW_PART. */ | |
663522cb | 7886 | mode = (GET_CODE (assign) == STRICT_LOW_PART |
dfbe1b2f | 7887 | ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign)); |
230d793d | 7888 | |
dfbe1b2f RK |
7889 | /* Shift OTHER right POS places and make it the source, restricting it |
7890 | to the proper length and mode. */ | |
230d793d | 7891 | |
5f4f0e22 CH |
7892 | src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT, |
7893 | GET_MODE (src), other, pos), | |
6139ff20 RK |
7894 | mode, |
7895 | GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT | |
0345195a | 7896 | ? ~(unsigned HOST_WIDE_INT) 0 |
729a2125 | 7897 | : ((unsigned HOST_WIDE_INT) 1 << len) - 1, |
e3d616e3 | 7898 | dest, 0); |
230d793d | 7899 | |
6e814b8d KH |
7900 | /* If SRC is masked by an AND that does not make a difference in |
7901 | the value being stored, strip it. */ | |
7902 | if (GET_CODE (assign) == ZERO_EXTRACT | |
7903 | && GET_CODE (XEXP (assign, 1)) == CONST_INT | |
7904 | && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT | |
7905 | && GET_CODE (src) == AND | |
7906 | && GET_CODE (XEXP (src, 1)) == CONST_INT | |
c5c15353 | 7907 | && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1)) |
6e814b8d KH |
7908 | == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1)) |
7909 | src = XEXP (src, 0); | |
7910 | ||
f1c6ba8b | 7911 | return gen_rtx_SET (VOIDmode, assign, src); |
230d793d RS |
7912 | } |
7913 | \f | |
7914 | /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c) | |
7915 | if so. */ | |
7916 | ||
7917 | static rtx | |
79a490a9 | 7918 | apply_distributive_law (rtx x) |
230d793d RS |
7919 | { |
7920 | enum rtx_code code = GET_CODE (x); | |
2981fafe | 7921 | enum rtx_code inner_code; |
230d793d RS |
7922 | rtx lhs, rhs, other; |
7923 | rtx tem; | |
230d793d | 7924 | |
2981fafe RS |
7925 | /* Distributivity is not true for floating point as it can change the |
7926 | value. So we don't do it unless -funsafe-math-optimizations. */ | |
7927 | if (FLOAT_MODE_P (GET_MODE (x)) | |
7928 | && ! flag_unsafe_math_optimizations) | |
d8a8a4da RS |
7929 | return x; |
7930 | ||
230d793d RS |
7931 | /* The outer operation can only be one of the following: */ |
7932 | if (code != IOR && code != AND && code != XOR | |
7933 | && code != PLUS && code != MINUS) | |
7934 | return x; | |
7935 | ||
2981fafe RS |
7936 | lhs = XEXP (x, 0); |
7937 | rhs = XEXP (x, 1); | |
230d793d | 7938 | |
0f41302f MS |
7939 | /* If either operand is a primitive we can't do anything, so get out |
7940 | fast. */ | |
ec8e098d | 7941 | if (OBJECT_P (lhs) || OBJECT_P (rhs)) |
230d793d RS |
7942 | return x; |
7943 | ||
7944 | lhs = expand_compound_operation (lhs); | |
7945 | rhs = expand_compound_operation (rhs); | |
7946 | inner_code = GET_CODE (lhs); | |
7947 | if (inner_code != GET_CODE (rhs)) | |
7948 | return x; | |
7949 | ||
7950 | /* See if the inner and outer operations distribute. */ | |
7951 | switch (inner_code) | |
7952 | { | |
7953 | case LSHIFTRT: | |
7954 | case ASHIFTRT: | |
7955 | case AND: | |
7956 | case IOR: | |
7957 | /* These all distribute except over PLUS. */ | |
7958 | if (code == PLUS || code == MINUS) | |
7959 | return x; | |
7960 | break; | |
7961 | ||
7962 | case MULT: | |
7963 | if (code != PLUS && code != MINUS) | |
7964 | return x; | |
7965 | break; | |
7966 | ||
7967 | case ASHIFT: | |
45620ed4 | 7968 | /* This is also a multiply, so it distributes over everything. */ |
230d793d RS |
7969 | break; |
7970 | ||
7971 | case SUBREG: | |
dfbe1b2f | 7972 | /* Non-paradoxical SUBREGs distributes over all operations, provided |
ddef6bc7 | 7973 | the inner modes and byte offsets are the same, this is an extraction |
2b4bd1bc JW |
7974 | of a low-order part, we don't convert an fp operation to int or |
7975 | vice versa, and we would not be converting a single-word | |
dfbe1b2f | 7976 | operation into a multi-word operation. The latter test is not |
2b4bd1bc | 7977 | required, but it prevents generating unneeded multi-word operations. |
dfbe1b2f RK |
7978 | Some of the previous tests are redundant given the latter test, but |
7979 | are retained because they are required for correctness. | |
7980 | ||
7981 | We produce the result slightly differently in this case. */ | |
7982 | ||
7983 | if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs)) | |
ddef6bc7 | 7984 | || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs) |
dfbe1b2f | 7985 | || ! subreg_lowpart_p (lhs) |
2b4bd1bc JW |
7986 | || (GET_MODE_CLASS (GET_MODE (lhs)) |
7987 | != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs)))) | |
dfbe1b2f | 7988 | || (GET_MODE_SIZE (GET_MODE (lhs)) |
8af24e26 | 7989 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs)))) |
dfbe1b2f | 7990 | || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD) |
230d793d RS |
7991 | return x; |
7992 | ||
1999435c PB |
7993 | tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)), |
7994 | SUBREG_REG (lhs), SUBREG_REG (rhs)); | |
4de249d9 | 7995 | return gen_lowpart (GET_MODE (x), tem); |
230d793d RS |
7996 | |
7997 | default: | |
7998 | return x; | |
7999 | } | |
8000 | ||
8001 | /* Set LHS and RHS to the inner operands (A and B in the example | |
8002 | above) and set OTHER to the common operand (C in the example). | |
ec8e098d | 8003 | There is only one way to do this unless the inner operation is |
230d793d | 8004 | commutative. */ |
ec8e098d | 8005 | if (COMMUTATIVE_ARITH_P (lhs) |
230d793d RS |
8006 | && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0))) |
8007 | other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1); | |
ec8e098d | 8008 | else if (COMMUTATIVE_ARITH_P (lhs) |
230d793d RS |
8009 | && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1))) |
8010 | other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0); | |
ec8e098d | 8011 | else if (COMMUTATIVE_ARITH_P (lhs) |
230d793d RS |
8012 | && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0))) |
8013 | other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1); | |
8014 | else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1))) | |
8015 | other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0); | |
8016 | else | |
8017 | return x; | |
8018 | ||
8019 | /* Form the new inner operation, seeing if it simplifies first. */ | |
1999435c | 8020 | tem = gen_binary (code, GET_MODE (x), lhs, rhs); |
230d793d RS |
8021 | |
8022 | /* There is one exception to the general way of distributing: | |
a0209ac2 | 8023 | (a | c) ^ (b | c) -> (a ^ b) & ~c */ |
230d793d RS |
8024 | if (code == XOR && inner_code == IOR) |
8025 | { | |
8026 | inner_code = AND; | |
f1c6ba8b | 8027 | other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x)); |
230d793d RS |
8028 | } |
8029 | ||
8030 | /* We may be able to continuing distributing the result, so call | |
8031 | ourselves recursively on the inner operation before forming the | |
8032 | outer operation, which we return. */ | |
1999435c PB |
8033 | return gen_binary (inner_code, GET_MODE (x), |
8034 | apply_distributive_law (tem), other); | |
230d793d RS |
8035 | } |
8036 | \f | |
8037 | /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done | |
8038 | in MODE. | |
8039 | ||
8040 | Return an equivalent form, if different from X. Otherwise, return X. If | |
8041 | X is zero, we are to always construct the equivalent form. */ | |
8042 | ||
8043 | static rtx | |
79a490a9 AJ |
8044 | simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop, |
8045 | unsigned HOST_WIDE_INT constop) | |
230d793d | 8046 | { |
951553af | 8047 | unsigned HOST_WIDE_INT nonzero; |
42301240 | 8048 | int i; |
230d793d | 8049 | |
6139ff20 | 8050 | /* Simplify VAROP knowing that we will be only looking at some of the |
8bc52806 JL |
8051 | bits in it. |
8052 | ||
8053 | Note by passing in CONSTOP, we guarantee that the bits not set in | |
8054 | CONSTOP are not significant and will never be examined. We must | |
8055 | ensure that is the case by explicitly masking out those bits | |
8056 | before returning. */ | |
e3d616e3 | 8057 | varop = force_to_mode (varop, mode, constop, NULL_RTX, 0); |
230d793d | 8058 | |
8bc52806 JL |
8059 | /* If VAROP is a CLOBBER, we will fail so return it. */ |
8060 | if (GET_CODE (varop) == CLOBBER) | |
6139ff20 | 8061 | return varop; |
230d793d | 8062 | |
8bc52806 JL |
8063 | /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP |
8064 | to VAROP and return the new constant. */ | |
8065 | if (GET_CODE (varop) == CONST_INT) | |
8066 | return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode)); | |
8067 | ||
fc06d7aa RK |
8068 | /* See what bits may be nonzero in VAROP. Unlike the general case of |
8069 | a call to nonzero_bits, here we don't care about bits outside | |
8070 | MODE. */ | |
8071 | ||
8072 | nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode); | |
9fa6d012 | 8073 | |
230d793d | 8074 | /* Turn off all bits in the constant that are known to already be zero. |
951553af | 8075 | Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS |
230d793d RS |
8076 | which is tested below. */ |
8077 | ||
951553af | 8078 | constop &= nonzero; |
230d793d RS |
8079 | |
8080 | /* If we don't have any bits left, return zero. */ | |
8081 | if (constop == 0) | |
8082 | return const0_rtx; | |
8083 | ||
42301240 | 8084 | /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is |
e0a2f705 | 8085 | a power of two, we can replace this with an ASHIFT. */ |
42301240 RK |
8086 | if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1 |
8087 | && (i = exact_log2 (constop)) >= 0) | |
8088 | return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i); | |
663522cb | 8089 | |
6139ff20 RK |
8090 | /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR |
8091 | or XOR, then try to apply the distributive law. This may eliminate | |
8092 | operations if either branch can be simplified because of the AND. | |
8093 | It may also make some cases more complex, but those cases probably | |
8094 | won't match a pattern either with or without this. */ | |
8095 | ||
8096 | if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR) | |
8097 | return | |
4de249d9 | 8098 | gen_lowpart |
6139ff20 RK |
8099 | (mode, |
8100 | apply_distributive_law | |
1999435c PB |
8101 | (gen_binary (GET_CODE (varop), GET_MODE (varop), |
8102 | simplify_and_const_int (NULL_RTX, GET_MODE (varop), | |
8103 | XEXP (varop, 0), constop), | |
8104 | simplify_and_const_int (NULL_RTX, GET_MODE (varop), | |
8105 | XEXP (varop, 1), constop)))); | |
6139ff20 | 8106 | |
8deb7514 RH |
8107 | /* If VAROP is PLUS, and the constant is a mask of low bite, distribute |
8108 | the AND and see if one of the operands simplifies to zero. If so, we | |
8109 | may eliminate it. */ | |
8110 | ||
8111 | if (GET_CODE (varop) == PLUS | |
8112 | && exact_log2 (constop + 1) >= 0) | |
8113 | { | |
8114 | rtx o0, o1; | |
8115 | ||
8116 | o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop); | |
8117 | o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop); | |
8118 | if (o0 == const0_rtx) | |
8119 | return o1; | |
8120 | if (o1 == const0_rtx) | |
8121 | return o0; | |
8122 | } | |
8123 | ||
230d793d RS |
8124 | /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG |
8125 | if we already had one (just check for the simplest cases). */ | |
8126 | if (x && GET_CODE (XEXP (x, 0)) == SUBREG | |
8127 | && GET_MODE (XEXP (x, 0)) == mode | |
8128 | && SUBREG_REG (XEXP (x, 0)) == varop) | |
8129 | varop = XEXP (x, 0); | |
8130 | else | |
4de249d9 | 8131 | varop = gen_lowpart (mode, varop); |
230d793d | 8132 | |
0f41302f | 8133 | /* If we can't make the SUBREG, try to return what we were given. */ |
230d793d RS |
8134 | if (GET_CODE (varop) == CLOBBER) |
8135 | return x ? x : varop; | |
8136 | ||
8137 | /* If we are only masking insignificant bits, return VAROP. */ | |
951553af | 8138 | if (constop == nonzero) |
230d793d | 8139 | x = varop; |
230d793d RS |
8140 | else |
8141 | { | |
d0c9db30 | 8142 | /* Otherwise, return an AND. */ |
3b5708e7 | 8143 | constop = trunc_int_for_mode (constop, mode); |
d0c9db30 AM |
8144 | /* See how much, if any, of X we can use. */ |
8145 | if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode) | |
1999435c | 8146 | x = gen_binary (AND, mode, varop, GEN_INT (constop)); |
230d793d | 8147 | |
d0c9db30 AM |
8148 | else |
8149 | { | |
8150 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
8151 | || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop) | |
8152 | SUBST (XEXP (x, 1), GEN_INT (constop)); | |
8153 | ||
8154 | SUBST (XEXP (x, 0), varop); | |
8155 | } | |
230d793d RS |
8156 | } |
8157 | ||
8158 | return x; | |
8159 | } | |
8160 | \f | |
2f93eea8 | 8161 | /* Given a REG, X, compute which bits in X can be nonzero. |
230d793d RS |
8162 | We don't care about bits outside of those defined in MODE. |
8163 | ||
8164 | For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is | |
8165 | a shift, AND, or zero_extract, we can do better. */ | |
8166 | ||
2f93eea8 PB |
8167 | static rtx |
8168 | reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode, | |
8169 | rtx known_x ATTRIBUTE_UNUSED, | |
8170 | enum machine_mode known_mode ATTRIBUTE_UNUSED, | |
8171 | unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED, | |
8172 | unsigned HOST_WIDE_INT *nonzero) | |
230d793d | 8173 | { |
230d793d RS |
8174 | rtx tem; |
8175 | ||
2f93eea8 PB |
8176 | /* If X is a register whose nonzero bits value is current, use it. |
8177 | Otherwise, if X is a register whose value we can find, use that | |
8178 | value. Otherwise, use the previously-computed global nonzero bits | |
8179 | for this register. */ | |
8180 | ||
8181 | if (reg_stat[REGNO (x)].last_set_value != 0 | |
8182 | && (reg_stat[REGNO (x)].last_set_mode == mode | |
8183 | || (GET_MODE_CLASS (reg_stat[REGNO (x)].last_set_mode) == MODE_INT | |
8184 | && GET_MODE_CLASS (mode) == MODE_INT)) | |
8185 | && (reg_stat[REGNO (x)].last_set_label == label_tick | |
8186 | || (REGNO (x) >= FIRST_PSEUDO_REGISTER | |
8187 | && REG_N_SETS (REGNO (x)) == 1 | |
8188 | && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, | |
8189 | REGNO (x)))) | |
8190 | && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid) | |
230d793d | 8191 | { |
2f93eea8 PB |
8192 | *nonzero &= reg_stat[REGNO (x)].last_set_nonzero_bits; |
8193 | return NULL; | |
230d793d RS |
8194 | } |
8195 | ||
2f93eea8 | 8196 | tem = get_last_value (x); |
230d793d | 8197 | |
2f93eea8 | 8198 | if (tem) |
0840fd91 | 8199 | { |
9afa3d54 | 8200 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND |
2f93eea8 PB |
8201 | /* If X is narrower than MODE and TEM is a non-negative |
8202 | constant that would appear negative in the mode of X, | |
8203 | sign-extend it for use in reg_nonzero_bits because some | |
8204 | machines (maybe most) will actually do the sign-extension | |
8205 | and this is the conservative approach. | |
8206 | ||
8207 | ??? For 2.5, try to tighten up the MD files in this regard | |
8208 | instead of this kludge. */ | |
8209 | ||
8210 | if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode) | |
8211 | && GET_CODE (tem) == CONST_INT | |
8212 | && INTVAL (tem) > 0 | |
8213 | && 0 != (INTVAL (tem) | |
8214 | & ((HOST_WIDE_INT) 1 | |
8215 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) | |
8216 | tem = GEN_INT (INTVAL (tem) | |
8217 | | ((HOST_WIDE_INT) (-1) | |
8218 | << GET_MODE_BITSIZE (GET_MODE (x)))); | |
230d793d | 8219 | #endif |
2f93eea8 | 8220 | return tem; |
230d793d | 8221 | } |
2f93eea8 | 8222 | else if (nonzero_sign_valid && reg_stat[REGNO (x)].nonzero_bits) |
8fd73754 | 8223 | { |
2f93eea8 | 8224 | unsigned HOST_WIDE_INT mask = reg_stat[REGNO (x)].nonzero_bits; |
8fd73754 | 8225 | |
2f93eea8 PB |
8226 | if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)) |
8227 | /* We don't know anything about the upper bits. */ | |
8228 | mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x)); | |
8229 | *nonzero &= mask; | |
8fd73754 AN |
8230 | } |
8231 | ||
2f93eea8 | 8232 | return NULL; |
8fd73754 AN |
8233 | } |
8234 | ||
d0ab8cd3 | 8235 | /* Return the number of bits at the high-order end of X that are known to |
5109d49f RK |
8236 | be equal to the sign bit. X will be used in mode MODE; if MODE is |
8237 | VOIDmode, X will be used in its own mode. The returned value will always | |
8238 | be between 1 and the number of bits in MODE. */ | |
d0ab8cd3 | 8239 | |
2f93eea8 PB |
8240 | static rtx |
8241 | reg_num_sign_bit_copies_for_combine (rtx x, enum machine_mode mode, | |
8242 | rtx known_x ATTRIBUTE_UNUSED, | |
8243 | enum machine_mode known_mode | |
8244 | ATTRIBUTE_UNUSED, | |
8245 | unsigned int known_ret ATTRIBUTE_UNUSED, | |
8246 | unsigned int *result) | |
d0ab8cd3 | 8247 | { |
d0ab8cd3 RK |
8248 | rtx tem; |
8249 | ||
2f93eea8 PB |
8250 | if (reg_stat[REGNO (x)].last_set_value != 0 |
8251 | && reg_stat[REGNO (x)].last_set_mode == mode | |
8252 | && (reg_stat[REGNO (x)].last_set_label == label_tick | |
8253 | || (REGNO (x) >= FIRST_PSEUDO_REGISTER | |
8254 | && REG_N_SETS (REGNO (x)) == 1 | |
8255 | && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, | |
8256 | REGNO (x)))) | |
8257 | && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid) | |
770ae6cc | 8258 | { |
2f93eea8 PB |
8259 | *result = reg_stat[REGNO (x)].last_set_sign_bit_copies; |
8260 | return NULL; | |
d0ab8cd3 RK |
8261 | } |
8262 | ||
2f93eea8 PB |
8263 | tem = get_last_value (x); |
8264 | if (tem != 0) | |
8265 | return tem; | |
d0ab8cd3 | 8266 | |
2f93eea8 PB |
8267 | if (nonzero_sign_valid && reg_stat[REGNO (x)].sign_bit_copies != 0 |
8268 | && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode)) | |
8269 | *result = reg_stat[REGNO (x)].sign_bit_copies; | |
8270 | ||
8271 | return NULL; | |
d0ab8cd3 RK |
8272 | } |
8273 | \f | |
1a26b032 RK |
8274 | /* Return the number of "extended" bits there are in X, when interpreted |
8275 | as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For | |
8276 | unsigned quantities, this is the number of high-order zero bits. | |
8277 | For signed quantities, this is the number of copies of the sign bit | |
8278 | minus 1. In both case, this function returns the number of "spare" | |
8279 | bits. For example, if two quantities for which this function returns | |
8280 | at least 1 are added, the addition is known not to overflow. | |
8281 | ||
8282 | This function will always return 0 unless called during combine, which | |
8283 | implies that it must be called from a define_split. */ | |
8284 | ||
770ae6cc | 8285 | unsigned int |
79a490a9 | 8286 | extended_count (rtx x, enum machine_mode mode, int unsignedp) |
1a26b032 | 8287 | { |
951553af | 8288 | if (nonzero_sign_valid == 0) |
1a26b032 RK |
8289 | return 0; |
8290 | ||
8291 | return (unsignedp | |
ac49a949 | 8292 | ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
26c34780 RS |
8293 | ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1 |
8294 | - floor_log2 (nonzero_bits (x, mode))) | |
770ae6cc | 8295 | : 0) |
1a26b032 RK |
8296 | : num_sign_bit_copies (x, mode) - 1); |
8297 | } | |
8298 | \f | |
230d793d RS |
8299 | /* This function is called from `simplify_shift_const' to merge two |
8300 | outer operations. Specifically, we have already found that we need | |
8301 | to perform operation *POP0 with constant *PCONST0 at the outermost | |
8302 | position. We would now like to also perform OP1 with constant CONST1 | |
8303 | (with *POP0 being done last). | |
8304 | ||
8305 | Return 1 if we can do the operation and update *POP0 and *PCONST0 with | |
663522cb | 8306 | the resulting operation. *PCOMP_P is set to 1 if we would need to |
230d793d RS |
8307 | complement the innermost operand, otherwise it is unchanged. |
8308 | ||
8309 | MODE is the mode in which the operation will be done. No bits outside | |
8310 | the width of this mode matter. It is assumed that the width of this mode | |
5f4f0e22 | 8311 | is smaller than or equal to HOST_BITS_PER_WIDE_INT. |
230d793d | 8312 | |
f822d252 | 8313 | If *POP0 or OP1 are UNKNOWN, it means no operation is required. Only NEG, PLUS, |
230d793d RS |
8314 | IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper |
8315 | result is simply *PCONST0. | |
8316 | ||
8317 | If the resulting operation cannot be expressed as one operation, we | |
8318 | return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */ | |
8319 | ||
8320 | static int | |
79a490a9 | 8321 | merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p) |
230d793d RS |
8322 | { |
8323 | enum rtx_code op0 = *pop0; | |
5f4f0e22 | 8324 | HOST_WIDE_INT const0 = *pconst0; |
230d793d RS |
8325 | |
8326 | const0 &= GET_MODE_MASK (mode); | |
8327 | const1 &= GET_MODE_MASK (mode); | |
8328 | ||
8329 | /* If OP0 is an AND, clear unimportant bits in CONST1. */ | |
8330 | if (op0 == AND) | |
8331 | const1 &= const0; | |
8332 | ||
f822d252 | 8333 | /* If OP0 or OP1 is UNKNOWN, this is easy. Similarly if they are the same or |
230d793d RS |
8334 | if OP0 is SET. */ |
8335 | ||
f822d252 | 8336 | if (op1 == UNKNOWN || op0 == SET) |
230d793d RS |
8337 | return 1; |
8338 | ||
f822d252 | 8339 | else if (op0 == UNKNOWN) |
230d793d RS |
8340 | op0 = op1, const0 = const1; |
8341 | ||
8342 | else if (op0 == op1) | |
8343 | { | |
8344 | switch (op0) | |
8345 | { | |
8346 | case AND: | |
8347 | const0 &= const1; | |
8348 | break; | |
8349 | case IOR: | |
8350 | const0 |= const1; | |
8351 | break; | |
8352 | case XOR: | |
8353 | const0 ^= const1; | |
8354 | break; | |
8355 | case PLUS: | |
8356 | const0 += const1; | |
8357 | break; | |
8358 | case NEG: | |
f822d252 | 8359 | op0 = UNKNOWN; |
230d793d | 8360 | break; |
e9a25f70 JL |
8361 | default: |
8362 | break; | |
230d793d RS |
8363 | } |
8364 | } | |
8365 | ||
8366 | /* Otherwise, if either is a PLUS or NEG, we can't do anything. */ | |
8367 | else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG) | |
8368 | return 0; | |
8369 | ||
8370 | /* If the two constants aren't the same, we can't do anything. The | |
8371 | remaining six cases can all be done. */ | |
8372 | else if (const0 != const1) | |
8373 | return 0; | |
8374 | ||
8375 | else | |
8376 | switch (op0) | |
8377 | { | |
8378 | case IOR: | |
8379 | if (op1 == AND) | |
8380 | /* (a & b) | b == b */ | |
8381 | op0 = SET; | |
8382 | else /* op1 == XOR */ | |
8383 | /* (a ^ b) | b == a | b */ | |
b729186a | 8384 | {;} |
230d793d RS |
8385 | break; |
8386 | ||
8387 | case XOR: | |
8388 | if (op1 == AND) | |
8389 | /* (a & b) ^ b == (~a) & b */ | |
8390 | op0 = AND, *pcomp_p = 1; | |
8391 | else /* op1 == IOR */ | |
8392 | /* (a | b) ^ b == a & ~b */ | |
7d4444ea | 8393 | op0 = AND, const0 = ~const0; |
230d793d RS |
8394 | break; |
8395 | ||
8396 | case AND: | |
8397 | if (op1 == IOR) | |
8398 | /* (a | b) & b == b */ | |
8399 | op0 = SET; | |
8400 | else /* op1 == XOR */ | |
8401 | /* (a ^ b) & b) == (~a) & b */ | |
8402 | *pcomp_p = 1; | |
8403 | break; | |
e9a25f70 JL |
8404 | default: |
8405 | break; | |
230d793d RS |
8406 | } |
8407 | ||
8408 | /* Check for NO-OP cases. */ | |
8409 | const0 &= GET_MODE_MASK (mode); | |
8410 | if (const0 == 0 | |
8411 | && (op0 == IOR || op0 == XOR || op0 == PLUS)) | |
f822d252 | 8412 | op0 = UNKNOWN; |
230d793d RS |
8413 | else if (const0 == 0 && op0 == AND) |
8414 | op0 = SET; | |
e51712db KG |
8415 | else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode) |
8416 | && op0 == AND) | |
f822d252 | 8417 | op0 = UNKNOWN; |
230d793d | 8418 | |
7e4ce834 RH |
8419 | /* ??? Slightly redundant with the above mask, but not entirely. |
8420 | Moving this above means we'd have to sign-extend the mode mask | |
8421 | for the final test. */ | |
8422 | const0 = trunc_int_for_mode (const0, mode); | |
9fa6d012 | 8423 | |
230d793d RS |
8424 | *pop0 = op0; |
8425 | *pconst0 = const0; | |
8426 | ||
8427 | return 1; | |
8428 | } | |
8429 | \f | |
8430 | /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift. | |
da7d8304 | 8431 | The result of the shift is RESULT_MODE. X, if nonzero, is an expression |
230d793d RS |
8432 | that we started with. |
8433 | ||
8434 | The shift is normally computed in the widest mode we find in VAROP, as | |
8435 | long as it isn't a different number of words than RESULT_MODE. Exceptions | |
8436 | are ASHIFTRT and ROTATE, which are always done in their original mode, */ | |
8437 | ||
8438 | static rtx | |
79a490a9 AJ |
8439 | simplify_shift_const (rtx x, enum rtx_code code, |
8440 | enum machine_mode result_mode, rtx varop, | |
8441 | int orig_count) | |
230d793d RS |
8442 | { |
8443 | enum rtx_code orig_code = code; | |
770ae6cc RK |
8444 | unsigned int count; |
8445 | int signed_count; | |
230d793d RS |
8446 | enum machine_mode mode = result_mode; |
8447 | enum machine_mode shift_mode, tmode; | |
770ae6cc | 8448 | unsigned int mode_words |
230d793d RS |
8449 | = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; |
8450 | /* We form (outer_op (code varop count) (outer_const)). */ | |
f822d252 | 8451 | enum rtx_code outer_op = UNKNOWN; |
c4e861e8 | 8452 | HOST_WIDE_INT outer_const = 0; |
230d793d RS |
8453 | rtx const_rtx; |
8454 | int complement_p = 0; | |
8455 | rtx new; | |
8456 | ||
0051b6ca RH |
8457 | /* Make sure and truncate the "natural" shift on the way in. We don't |
8458 | want to do this inside the loop as it makes it more difficult to | |
8459 | combine shifts. */ | |
0051b6ca RH |
8460 | if (SHIFT_COUNT_TRUNCATED) |
8461 | orig_count &= GET_MODE_BITSIZE (mode) - 1; | |
0051b6ca | 8462 | |
230d793d RS |
8463 | /* If we were given an invalid count, don't do anything except exactly |
8464 | what was requested. */ | |
8465 | ||
0051b6ca | 8466 | if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode)) |
230d793d RS |
8467 | { |
8468 | if (x) | |
8469 | return x; | |
8470 | ||
0051b6ca | 8471 | return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count)); |
230d793d RS |
8472 | } |
8473 | ||
0051b6ca | 8474 | count = orig_count; |
853d8828 | 8475 | |
230d793d RS |
8476 | /* Unless one of the branches of the `if' in this loop does a `continue', |
8477 | we will `break' the loop after the `if'. */ | |
8478 | ||
8479 | while (count != 0) | |
8480 | { | |
8481 | /* If we have an operand of (clobber (const_int 0)), just return that | |
8482 | value. */ | |
8483 | if (GET_CODE (varop) == CLOBBER) | |
8484 | return varop; | |
8485 | ||
8486 | /* If we discovered we had to complement VAROP, leave. Making a NOT | |
8487 | here would cause an infinite loop. */ | |
8488 | if (complement_p) | |
8489 | break; | |
8490 | ||
abc95ed3 | 8491 | /* Convert ROTATERT to ROTATE. */ |
230d793d | 8492 | if (code == ROTATERT) |
ad9df12f IS |
8493 | { |
8494 | unsigned int bitsize = GET_MODE_BITSIZE (result_mode);; | |
8495 | code = ROTATE; | |
8496 | if (VECTOR_MODE_P (result_mode)) | |
8497 | count = bitsize / GET_MODE_NUNITS (result_mode) - count; | |
8498 | else | |
8499 | count = bitsize - count; | |
8500 | } | |
230d793d | 8501 | |
230d793d | 8502 | /* We need to determine what mode we will do the shift in. If the |
f6789c77 RK |
8503 | shift is a right shift or a ROTATE, we must always do it in the mode |
8504 | it was originally done in. Otherwise, we can do it in MODE, the | |
0f41302f | 8505 | widest mode encountered. */ |
f6789c77 RK |
8506 | shift_mode |
8507 | = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
8508 | ? result_mode : mode); | |
230d793d RS |
8509 | |
8510 | /* Handle cases where the count is greater than the size of the mode | |
853d8828 RH |
8511 | minus 1. For ASHIFT, use the size minus one as the count (this can |
8512 | occur when simplifying (lshiftrt (ashiftrt ..))). For rotates, | |
8513 | take the count modulo the size. For other shifts, the result is | |
8514 | zero. | |
230d793d RS |
8515 | |
8516 | Since these shifts are being produced by the compiler by combining | |
8517 | multiple operations, each of which are defined, we know what the | |
8518 | result is supposed to be. */ | |
663522cb | 8519 | |
26c34780 | 8520 | if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1)) |
230d793d RS |
8521 | { |
8522 | if (code == ASHIFTRT) | |
8523 | count = GET_MODE_BITSIZE (shift_mode) - 1; | |
8524 | else if (code == ROTATE || code == ROTATERT) | |
8525 | count %= GET_MODE_BITSIZE (shift_mode); | |
8526 | else | |
8527 | { | |
8528 | /* We can't simply return zero because there may be an | |
8529 | outer op. */ | |
8530 | varop = const0_rtx; | |
8531 | count = 0; | |
8532 | break; | |
8533 | } | |
8534 | } | |
8535 | ||
312def2e RK |
8536 | /* An arithmetic right shift of a quantity known to be -1 or 0 |
8537 | is a no-op. */ | |
8538 | if (code == ASHIFTRT | |
8539 | && (num_sign_bit_copies (varop, shift_mode) | |
8540 | == GET_MODE_BITSIZE (shift_mode))) | |
d0ab8cd3 | 8541 | { |
312def2e RK |
8542 | count = 0; |
8543 | break; | |
8544 | } | |
d0ab8cd3 | 8545 | |
312def2e RK |
8546 | /* If we are doing an arithmetic right shift and discarding all but |
8547 | the sign bit copies, this is equivalent to doing a shift by the | |
8548 | bitsize minus one. Convert it into that shift because it will often | |
8549 | allow other simplifications. */ | |
500c518b | 8550 | |
312def2e RK |
8551 | if (code == ASHIFTRT |
8552 | && (count + num_sign_bit_copies (varop, shift_mode) | |
8553 | >= GET_MODE_BITSIZE (shift_mode))) | |
8554 | count = GET_MODE_BITSIZE (shift_mode) - 1; | |
500c518b | 8555 | |
230d793d RS |
8556 | /* We simplify the tests below and elsewhere by converting |
8557 | ASHIFTRT to LSHIFTRT if we know the sign bit is clear. | |
e0a2f705 KH |
8558 | `make_compound_operation' will convert it to an ASHIFTRT for |
8559 | those machines (such as VAX) that don't have an LSHIFTRT. */ | |
5f4f0e22 | 8560 | if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT |
230d793d | 8561 | && code == ASHIFTRT |
951553af | 8562 | && ((nonzero_bits (varop, shift_mode) |
5f4f0e22 CH |
8563 | & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1))) |
8564 | == 0)) | |
230d793d RS |
8565 | code = LSHIFTRT; |
8566 | ||
b9422b69 JH |
8567 | if (code == LSHIFTRT |
8568 | && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT | |
8569 | && !(nonzero_bits (varop, shift_mode) >> count)) | |
2d21f7d6 | 8570 | varop = const0_rtx; |
b9422b69 JH |
8571 | if (code == ASHIFT |
8572 | && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT | |
8573 | && !((nonzero_bits (varop, shift_mode) << count) | |
8574 | & GET_MODE_MASK (shift_mode))) | |
2d21f7d6 | 8575 | varop = const0_rtx; |
b9422b69 | 8576 | |
230d793d RS |
8577 | switch (GET_CODE (varop)) |
8578 | { | |
8579 | case SIGN_EXTEND: | |
8580 | case ZERO_EXTEND: | |
8581 | case SIGN_EXTRACT: | |
8582 | case ZERO_EXTRACT: | |
8583 | new = expand_compound_operation (varop); | |
8584 | if (new != varop) | |
8585 | { | |
8586 | varop = new; | |
8587 | continue; | |
8588 | } | |
8589 | break; | |
8590 | ||
8591 | case MEM: | |
8592 | /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH | |
8593 | minus the width of a smaller mode, we can do this with a | |
8594 | SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */ | |
8595 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
8596 | && ! mode_dependent_address_p (XEXP (varop, 0)) | |
8597 | && ! MEM_VOLATILE_P (varop) | |
8598 | && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count, | |
8599 | MODE_INT, 1)) != BLKmode) | |
8600 | { | |
f1ec5147 RK |
8601 | new = adjust_address_nv (varop, tmode, |
8602 | BYTES_BIG_ENDIAN ? 0 | |
8603 | : count / BITS_PER_UNIT); | |
bf49b139 | 8604 | |
f1c6ba8b RK |
8605 | varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND |
8606 | : ZERO_EXTEND, mode, new); | |
230d793d RS |
8607 | count = 0; |
8608 | continue; | |
8609 | } | |
8610 | break; | |
8611 | ||
8612 | case USE: | |
8613 | /* Similar to the case above, except that we can only do this if | |
8614 | the resulting mode is the same as that of the underlying | |
8615 | MEM and adjust the address depending on the *bits* endianness | |
8616 | because of the way that bit-field extract insns are defined. */ | |
8617 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
8618 | && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count, | |
8619 | MODE_INT, 1)) != BLKmode | |
8620 | && tmode == GET_MODE (XEXP (varop, 0))) | |
8621 | { | |
f76b9db2 ILT |
8622 | if (BITS_BIG_ENDIAN) |
8623 | new = XEXP (varop, 0); | |
8624 | else | |
8625 | { | |
8626 | new = copy_rtx (XEXP (varop, 0)); | |
663522cb | 8627 | SUBST (XEXP (new, 0), |
f76b9db2 ILT |
8628 | plus_constant (XEXP (new, 0), |
8629 | count / BITS_PER_UNIT)); | |
8630 | } | |
230d793d | 8631 | |
f1c6ba8b RK |
8632 | varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND |
8633 | : ZERO_EXTEND, mode, new); | |
230d793d RS |
8634 | count = 0; |
8635 | continue; | |
8636 | } | |
8637 | break; | |
8638 | ||
8639 | case SUBREG: | |
8640 | /* If VAROP is a SUBREG, strip it as long as the inner operand has | |
8641 | the same number of words as what we've seen so far. Then store | |
8642 | the widest mode in MODE. */ | |
f9e67232 RS |
8643 | if (subreg_lowpart_p (varop) |
8644 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))) | |
8645 | > GET_MODE_SIZE (GET_MODE (varop))) | |
26c34780 RS |
8646 | && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))) |
8647 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
8648 | == mode_words) | |
230d793d RS |
8649 | { |
8650 | varop = SUBREG_REG (varop); | |
8651 | if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode)) | |
8652 | mode = GET_MODE (varop); | |
8653 | continue; | |
8654 | } | |
8655 | break; | |
8656 | ||
8657 | case MULT: | |
8658 | /* Some machines use MULT instead of ASHIFT because MULT | |
8659 | is cheaper. But it is still better on those machines to | |
8660 | merge two shifts into one. */ | |
8661 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
8662 | && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | |
8663 | { | |
770ae6cc | 8664 | varop |
1999435c PB |
8665 | = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0), |
8666 | GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1))))); | |
230d793d RS |
8667 | continue; |
8668 | } | |
8669 | break; | |
8670 | ||
8671 | case UDIV: | |
8672 | /* Similar, for when divides are cheaper. */ | |
8673 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
8674 | && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | |
8675 | { | |
770ae6cc | 8676 | varop |
1999435c PB |
8677 | = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0), |
8678 | GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1))))); | |
230d793d RS |
8679 | continue; |
8680 | } | |
8681 | break; | |
8682 | ||
8683 | case ASHIFTRT: | |
8f8d8d6e AO |
8684 | /* If we are extracting just the sign bit of an arithmetic |
8685 | right shift, that shift is not needed. However, the sign | |
8686 | bit of a wider mode may be different from what would be | |
8687 | interpreted as the sign bit in a narrower mode, so, if | |
8688 | the result is narrower, don't discard the shift. */ | |
26c34780 RS |
8689 | if (code == LSHIFTRT |
8690 | && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1) | |
8f8d8d6e AO |
8691 | && (GET_MODE_BITSIZE (result_mode) |
8692 | >= GET_MODE_BITSIZE (GET_MODE (varop)))) | |
230d793d RS |
8693 | { |
8694 | varop = XEXP (varop, 0); | |
8695 | continue; | |
8696 | } | |
8697 | ||
0f41302f | 8698 | /* ... fall through ... */ |
230d793d RS |
8699 | |
8700 | case LSHIFTRT: | |
8701 | case ASHIFT: | |
230d793d RS |
8702 | case ROTATE: |
8703 | /* Here we have two nested shifts. The result is usually the | |
8704 | AND of a new shift with a mask. We compute the result below. */ | |
8705 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
8706 | && INTVAL (XEXP (varop, 1)) >= 0 | |
8707 | && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop)) | |
5f4f0e22 CH |
8708 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
8709 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
230d793d RS |
8710 | { |
8711 | enum rtx_code first_code = GET_CODE (varop); | |
770ae6cc | 8712 | unsigned int first_count = INTVAL (XEXP (varop, 1)); |
5f4f0e22 | 8713 | unsigned HOST_WIDE_INT mask; |
230d793d | 8714 | rtx mask_rtx; |
230d793d | 8715 | |
230d793d RS |
8716 | /* We have one common special case. We can't do any merging if |
8717 | the inner code is an ASHIFTRT of a smaller mode. However, if | |
8718 | we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2) | |
8719 | with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2), | |
8720 | we can convert it to | |
8721 | (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1). | |
8722 | This simplifies certain SIGN_EXTEND operations. */ | |
8723 | if (code == ASHIFT && first_code == ASHIFTRT | |
26c34780 RS |
8724 | && count == (unsigned int) |
8725 | (GET_MODE_BITSIZE (result_mode) | |
8726 | - GET_MODE_BITSIZE (GET_MODE (varop)))) | |
230d793d RS |
8727 | { |
8728 | /* C3 has the low-order C1 bits zero. */ | |
663522cb | 8729 | |
5f4f0e22 | 8730 | mask = (GET_MODE_MASK (mode) |
663522cb | 8731 | & ~(((HOST_WIDE_INT) 1 << first_count) - 1)); |
230d793d | 8732 | |
5f4f0e22 | 8733 | varop = simplify_and_const_int (NULL_RTX, result_mode, |
230d793d | 8734 | XEXP (varop, 0), mask); |
5f4f0e22 | 8735 | varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode, |
230d793d RS |
8736 | varop, count); |
8737 | count = first_count; | |
8738 | code = ASHIFTRT; | |
8739 | continue; | |
8740 | } | |
663522cb | 8741 | |
d0ab8cd3 RK |
8742 | /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more |
8743 | than C1 high-order bits equal to the sign bit, we can convert | |
e0a2f705 | 8744 | this to either an ASHIFT or an ASHIFTRT depending on the |
663522cb | 8745 | two counts. |
230d793d RS |
8746 | |
8747 | We cannot do this if VAROP's mode is not SHIFT_MODE. */ | |
8748 | ||
8749 | if (code == ASHIFTRT && first_code == ASHIFT | |
8750 | && GET_MODE (varop) == shift_mode | |
d0ab8cd3 RK |
8751 | && (num_sign_bit_copies (XEXP (varop, 0), shift_mode) |
8752 | > first_count)) | |
230d793d | 8753 | { |
d0ab8cd3 | 8754 | varop = XEXP (varop, 0); |
770ae6cc RK |
8755 | |
8756 | signed_count = count - first_count; | |
8757 | if (signed_count < 0) | |
663522cb | 8758 | count = -signed_count, code = ASHIFT; |
770ae6cc RK |
8759 | else |
8760 | count = signed_count; | |
8761 | ||
d0ab8cd3 | 8762 | continue; |
230d793d RS |
8763 | } |
8764 | ||
8765 | /* There are some cases we can't do. If CODE is ASHIFTRT, | |
8766 | we can only do this if FIRST_CODE is also ASHIFTRT. | |
8767 | ||
8768 | We can't do the case when CODE is ROTATE and FIRST_CODE is | |
8769 | ASHIFTRT. | |
8770 | ||
8771 | If the mode of this shift is not the mode of the outer shift, | |
bdaae9a0 | 8772 | we can't do this if either shift is a right shift or ROTATE. |
230d793d RS |
8773 | |
8774 | Finally, we can't do any of these if the mode is too wide | |
8775 | unless the codes are the same. | |
8776 | ||
8777 | Handle the case where the shift codes are the same | |
8778 | first. */ | |
8779 | ||
8780 | if (code == first_code) | |
8781 | { | |
8782 | if (GET_MODE (varop) != result_mode | |
bdaae9a0 RK |
8783 | && (code == ASHIFTRT || code == LSHIFTRT |
8784 | || code == ROTATE)) | |
230d793d RS |
8785 | break; |
8786 | ||
8787 | count += first_count; | |
8788 | varop = XEXP (varop, 0); | |
8789 | continue; | |
8790 | } | |
8791 | ||
8792 | if (code == ASHIFTRT | |
8793 | || (code == ROTATE && first_code == ASHIFTRT) | |
5f4f0e22 | 8794 | || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT |
230d793d | 8795 | || (GET_MODE (varop) != result_mode |
bdaae9a0 RK |
8796 | && (first_code == ASHIFTRT || first_code == LSHIFTRT |
8797 | || first_code == ROTATE | |
230d793d RS |
8798 | || code == ROTATE))) |
8799 | break; | |
8800 | ||
8801 | /* To compute the mask to apply after the shift, shift the | |
663522cb | 8802 | nonzero bits of the inner shift the same way the |
230d793d RS |
8803 | outer shift will. */ |
8804 | ||
951553af | 8805 | mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop))); |
230d793d RS |
8806 | |
8807 | mask_rtx | |
8808 | = simplify_binary_operation (code, result_mode, mask_rtx, | |
5f4f0e22 | 8809 | GEN_INT (count)); |
663522cb | 8810 | |
230d793d RS |
8811 | /* Give up if we can't compute an outer operation to use. */ |
8812 | if (mask_rtx == 0 | |
8813 | || GET_CODE (mask_rtx) != CONST_INT | |
8814 | || ! merge_outer_ops (&outer_op, &outer_const, AND, | |
8815 | INTVAL (mask_rtx), | |
8816 | result_mode, &complement_p)) | |
8817 | break; | |
8818 | ||
8819 | /* If the shifts are in the same direction, we add the | |
8820 | counts. Otherwise, we subtract them. */ | |
770ae6cc | 8821 | signed_count = count; |
230d793d RS |
8822 | if ((code == ASHIFTRT || code == LSHIFTRT) |
8823 | == (first_code == ASHIFTRT || first_code == LSHIFTRT)) | |
770ae6cc | 8824 | signed_count += first_count; |
230d793d | 8825 | else |
770ae6cc | 8826 | signed_count -= first_count; |
230d793d | 8827 | |
663522cb | 8828 | /* If COUNT is positive, the new shift is usually CODE, |
230d793d RS |
8829 | except for the two exceptions below, in which case it is |
8830 | FIRST_CODE. If the count is negative, FIRST_CODE should | |
8831 | always be used */ | |
770ae6cc | 8832 | if (signed_count > 0 |
230d793d RS |
8833 | && ((first_code == ROTATE && code == ASHIFT) |
8834 | || (first_code == ASHIFTRT && code == LSHIFTRT))) | |
770ae6cc RK |
8835 | code = first_code, count = signed_count; |
8836 | else if (signed_count < 0) | |
663522cb | 8837 | code = first_code, count = -signed_count; |
770ae6cc RK |
8838 | else |
8839 | count = signed_count; | |
230d793d RS |
8840 | |
8841 | varop = XEXP (varop, 0); | |
8842 | continue; | |
8843 | } | |
8844 | ||
8845 | /* If we have (A << B << C) for any shift, we can convert this to | |
8846 | (A << C << B). This wins if A is a constant. Only try this if | |
8847 | B is not a constant. */ | |
8848 | ||
8849 | else if (GET_CODE (varop) == code | |
8850 | && GET_CODE (XEXP (varop, 1)) != CONST_INT | |
8851 | && 0 != (new | |
8852 | = simplify_binary_operation (code, mode, | |
8853 | XEXP (varop, 0), | |
5f4f0e22 | 8854 | GEN_INT (count)))) |
230d793d | 8855 | { |
f1c6ba8b | 8856 | varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1)); |
230d793d RS |
8857 | count = 0; |
8858 | continue; | |
8859 | } | |
8860 | break; | |
8861 | ||
8862 | case NOT: | |
8863 | /* Make this fit the case below. */ | |
f1c6ba8b RK |
8864 | varop = gen_rtx_XOR (mode, XEXP (varop, 0), |
8865 | GEN_INT (GET_MODE_MASK (mode))); | |
230d793d RS |
8866 | continue; |
8867 | ||
8868 | case IOR: | |
8869 | case AND: | |
8870 | case XOR: | |
8871 | /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C) | |
8872 | with C the size of VAROP - 1 and the shift is logical if | |
8873 | STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1, | |
8874 | we have an (le X 0) operation. If we have an arithmetic shift | |
8875 | and STORE_FLAG_VALUE is 1 or we have a logical shift with | |
8876 | STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */ | |
8877 | ||
8878 | if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS | |
8879 | && XEXP (XEXP (varop, 0), 1) == constm1_rtx | |
8880 | && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) | |
8881 | && (code == LSHIFTRT || code == ASHIFTRT) | |
26c34780 RS |
8882 | && count == (unsigned int) |
8883 | (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) | |
230d793d RS |
8884 | && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) |
8885 | { | |
8886 | count = 0; | |
f1c6ba8b RK |
8887 | varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1), |
8888 | const0_rtx); | |
230d793d RS |
8889 | |
8890 | if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT) | |
f1c6ba8b | 8891 | varop = gen_rtx_NEG (GET_MODE (varop), varop); |
230d793d RS |
8892 | |
8893 | continue; | |
8894 | } | |
8895 | ||
8896 | /* If we have (shift (logical)), move the logical to the outside | |
8897 | to allow it to possibly combine with another logical and the | |
8898 | shift to combine with another shift. This also canonicalizes to | |
8899 | what a ZERO_EXTRACT looks like. Also, some machines have | |
8900 | (and (shift)) insns. */ | |
8901 | ||
8902 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
39a44a4e RK |
8903 | /* We can't do this if we have (ashiftrt (xor)) and the |
8904 | constant has its sign bit set in shift_mode. */ | |
8905 | && !(code == ASHIFTRT && GET_CODE (varop) == XOR | |
8906 | && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)), | |
8907 | shift_mode)) | |
230d793d RS |
8908 | && (new = simplify_binary_operation (code, result_mode, |
8909 | XEXP (varop, 1), | |
5f4f0e22 | 8910 | GEN_INT (count))) != 0 |
663522cb | 8911 | && GET_CODE (new) == CONST_INT |
230d793d RS |
8912 | && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop), |
8913 | INTVAL (new), result_mode, &complement_p)) | |
8914 | { | |
8915 | varop = XEXP (varop, 0); | |
8916 | continue; | |
8917 | } | |
8918 | ||
8919 | /* If we can't do that, try to simplify the shift in each arm of the | |
8920 | logical expression, make a new logical expression, and apply | |
39a44a4e RK |
8921 | the inverse distributive law. This also can't be done |
8922 | for some (ashiftrt (xor)). */ | |
446f52f4 GS |
8923 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT |
8924 | && !(code == ASHIFTRT && GET_CODE (varop) == XOR | |
8925 | && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)), | |
8926 | shift_mode))) | |
39a44a4e RK |
8927 | { |
8928 | rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode, | |
8929 | XEXP (varop, 0), count); | |
8930 | rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode, | |
8931 | XEXP (varop, 1), count); | |
230d793d | 8932 | |
1999435c | 8933 | varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs); |
39a44a4e | 8934 | varop = apply_distributive_law (varop); |
230d793d | 8935 | |
39a44a4e | 8936 | count = 0; |
446f52f4 | 8937 | continue; |
39a44a4e | 8938 | } |
230d793d RS |
8939 | break; |
8940 | ||
8941 | case EQ: | |
beb235f8 | 8942 | /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE |
230d793d | 8943 | says that the sign bit can be tested, FOO has mode MODE, C is |
45620ed4 RK |
8944 | GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit |
8945 | that may be nonzero. */ | |
8946 | if (code == LSHIFTRT | |
230d793d RS |
8947 | && XEXP (varop, 1) == const0_rtx |
8948 | && GET_MODE (XEXP (varop, 0)) == result_mode | |
26c34780 | 8949 | && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1) |
5f4f0e22 | 8950 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
230d793d | 8951 | && ((STORE_FLAG_VALUE |
663522cb | 8952 | & ((HOST_WIDE_INT) 1 |
770ae6cc | 8953 | < (GET_MODE_BITSIZE (result_mode) - 1)))) |
951553af | 8954 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1 |
5f4f0e22 CH |
8955 | && merge_outer_ops (&outer_op, &outer_const, XOR, |
8956 | (HOST_WIDE_INT) 1, result_mode, | |
8957 | &complement_p)) | |
230d793d RS |
8958 | { |
8959 | varop = XEXP (varop, 0); | |
8960 | count = 0; | |
8961 | continue; | |
8962 | } | |
8963 | break; | |
8964 | ||
8965 | case NEG: | |
d0ab8cd3 RK |
8966 | /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less |
8967 | than the number of bits in the mode is equivalent to A. */ | |
26c34780 RS |
8968 | if (code == LSHIFTRT |
8969 | && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1) | |
951553af | 8970 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1) |
230d793d | 8971 | { |
d0ab8cd3 | 8972 | varop = XEXP (varop, 0); |
230d793d RS |
8973 | count = 0; |
8974 | continue; | |
8975 | } | |
8976 | ||
8977 | /* NEG commutes with ASHIFT since it is multiplication. Move the | |
8978 | NEG outside to allow shifts to combine. */ | |
8979 | if (code == ASHIFT | |
5f4f0e22 CH |
8980 | && merge_outer_ops (&outer_op, &outer_const, NEG, |
8981 | (HOST_WIDE_INT) 0, result_mode, | |
8982 | &complement_p)) | |
230d793d RS |
8983 | { |
8984 | varop = XEXP (varop, 0); | |
8985 | continue; | |
8986 | } | |
8987 | break; | |
8988 | ||
8989 | case PLUS: | |
d0ab8cd3 RK |
8990 | /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C |
8991 | is one less than the number of bits in the mode is | |
8992 | equivalent to (xor A 1). */ | |
26c34780 RS |
8993 | if (code == LSHIFTRT |
8994 | && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1) | |
230d793d | 8995 | && XEXP (varop, 1) == constm1_rtx |
951553af | 8996 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1 |
5f4f0e22 CH |
8997 | && merge_outer_ops (&outer_op, &outer_const, XOR, |
8998 | (HOST_WIDE_INT) 1, result_mode, | |
8999 | &complement_p)) | |
230d793d RS |
9000 | { |
9001 | count = 0; | |
9002 | varop = XEXP (varop, 0); | |
9003 | continue; | |
9004 | } | |
9005 | ||
3f508eca | 9006 | /* If we have (xshiftrt (plus FOO BAR) C), and the only bits |
951553af | 9007 | that might be nonzero in BAR are those being shifted out and those |
3f508eca RK |
9008 | bits are known zero in FOO, we can replace the PLUS with FOO. |
9009 | Similarly in the other operand order. This code occurs when | |
9010 | we are computing the size of a variable-size array. */ | |
9011 | ||
9012 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
5f4f0e22 | 9013 | && count < HOST_BITS_PER_WIDE_INT |
951553af RK |
9014 | && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0 |
9015 | && (nonzero_bits (XEXP (varop, 1), result_mode) | |
9016 | & nonzero_bits (XEXP (varop, 0), result_mode)) == 0) | |
3f508eca RK |
9017 | { |
9018 | varop = XEXP (varop, 0); | |
9019 | continue; | |
9020 | } | |
9021 | else if ((code == ASHIFTRT || code == LSHIFTRT) | |
5f4f0e22 | 9022 | && count < HOST_BITS_PER_WIDE_INT |
ac49a949 | 9023 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
951553af | 9024 | && 0 == (nonzero_bits (XEXP (varop, 0), result_mode) |
3f508eca | 9025 | >> count) |
951553af RK |
9026 | && 0 == (nonzero_bits (XEXP (varop, 0), result_mode) |
9027 | & nonzero_bits (XEXP (varop, 1), | |
3f508eca RK |
9028 | result_mode))) |
9029 | { | |
9030 | varop = XEXP (varop, 1); | |
9031 | continue; | |
9032 | } | |
9033 | ||
230d793d RS |
9034 | /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */ |
9035 | if (code == ASHIFT | |
9036 | && GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9037 | && (new = simplify_binary_operation (ASHIFT, result_mode, | |
9038 | XEXP (varop, 1), | |
5f4f0e22 | 9039 | GEN_INT (count))) != 0 |
770ae6cc | 9040 | && GET_CODE (new) == CONST_INT |
230d793d RS |
9041 | && merge_outer_ops (&outer_op, &outer_const, PLUS, |
9042 | INTVAL (new), result_mode, &complement_p)) | |
9043 | { | |
9044 | varop = XEXP (varop, 0); | |
9045 | continue; | |
9046 | } | |
b757b9f8 PH |
9047 | |
9048 | /* Check for 'PLUS signbit', which is the canonical form of 'XOR | |
9049 | signbit', and attempt to change the PLUS to an XOR and move it to | |
9050 | the outer operation as is done above in the AND/IOR/XOR case | |
9051 | leg for shift(logical). See details in logical handling above | |
471854f8 | 9052 | for reasoning in doing so. */ |
b757b9f8 PH |
9053 | if (code == LSHIFTRT |
9054 | && GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9055 | && mode_signbit_p (result_mode, XEXP (varop, 1)) | |
9056 | && (new = simplify_binary_operation (code, result_mode, | |
9057 | XEXP (varop, 1), | |
9058 | GEN_INT (count))) != 0 | |
9059 | && GET_CODE (new) == CONST_INT | |
9060 | && merge_outer_ops (&outer_op, &outer_const, XOR, | |
9061 | INTVAL (new), result_mode, &complement_p)) | |
9062 | { | |
9063 | varop = XEXP (varop, 0); | |
9064 | continue; | |
9065 | } | |
9066 | ||
230d793d RS |
9067 | break; |
9068 | ||
9069 | case MINUS: | |
9070 | /* If we have (xshiftrt (minus (ashiftrt X C)) X) C) | |
9071 | with C the size of VAROP - 1 and the shift is logical if | |
9072 | STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1, | |
9073 | we have a (gt X 0) operation. If the shift is arithmetic with | |
9074 | STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1, | |
9075 | we have a (neg (gt X 0)) operation. */ | |
9076 | ||
0802d516 RK |
9077 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
9078 | && GET_CODE (XEXP (varop, 0)) == ASHIFTRT | |
26c34780 RS |
9079 | && count == (unsigned int) |
9080 | (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) | |
230d793d RS |
9081 | && (code == LSHIFTRT || code == ASHIFTRT) |
9082 | && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT | |
26c34780 RS |
9083 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1)) |
9084 | == count | |
230d793d RS |
9085 | && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) |
9086 | { | |
9087 | count = 0; | |
f1c6ba8b RK |
9088 | varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1), |
9089 | const0_rtx); | |
230d793d RS |
9090 | |
9091 | if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT) | |
f1c6ba8b | 9092 | varop = gen_rtx_NEG (GET_MODE (varop), varop); |
230d793d RS |
9093 | |
9094 | continue; | |
9095 | } | |
9096 | break; | |
6e0ef100 JC |
9097 | |
9098 | case TRUNCATE: | |
9099 | /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt)) | |
9100 | if the truncate does not affect the value. */ | |
9101 | if (code == LSHIFTRT | |
9102 | && GET_CODE (XEXP (varop, 0)) == LSHIFTRT | |
9103 | && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT | |
9104 | && (INTVAL (XEXP (XEXP (varop, 0), 1)) | |
b577a8ff JL |
9105 | >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0))) |
9106 | - GET_MODE_BITSIZE (GET_MODE (varop))))) | |
6e0ef100 JC |
9107 | { |
9108 | rtx varop_inner = XEXP (varop, 0); | |
9109 | ||
770ae6cc | 9110 | varop_inner |
f1c6ba8b RK |
9111 | = gen_rtx_LSHIFTRT (GET_MODE (varop_inner), |
9112 | XEXP (varop_inner, 0), | |
9113 | GEN_INT | |
9114 | (count + INTVAL (XEXP (varop_inner, 1)))); | |
9115 | varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner); | |
6e0ef100 JC |
9116 | count = 0; |
9117 | continue; | |
9118 | } | |
9119 | break; | |
663522cb | 9120 | |
e9a25f70 JL |
9121 | default: |
9122 | break; | |
230d793d RS |
9123 | } |
9124 | ||
9125 | break; | |
9126 | } | |
9127 | ||
9128 | /* We need to determine what mode to do the shift in. If the shift is | |
f6789c77 RK |
9129 | a right shift or ROTATE, we must always do it in the mode it was |
9130 | originally done in. Otherwise, we can do it in MODE, the widest mode | |
9131 | encountered. The code we care about is that of the shift that will | |
9132 | actually be done, not the shift that was originally requested. */ | |
9133 | shift_mode | |
9134 | = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
9135 | ? result_mode : mode); | |
230d793d RS |
9136 | |
9137 | /* We have now finished analyzing the shift. The result should be | |
9138 | a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If | |
f822d252 | 9139 | OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied |
230d793d RS |
9140 | to the result of the shift. OUTER_CONST is the relevant constant, |
9141 | but we must turn off all bits turned off in the shift. | |
9142 | ||
9143 | If we were passed a value for X, see if we can use any pieces of | |
9144 | it. If not, make new rtx. */ | |
9145 | ||
ec8e098d | 9146 | if (x && GET_RTX_CLASS (GET_CODE (x)) == RTX_BIN_ARITH |
230d793d | 9147 | && GET_CODE (XEXP (x, 1)) == CONST_INT |
26c34780 | 9148 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count) |
230d793d RS |
9149 | const_rtx = XEXP (x, 1); |
9150 | else | |
5f4f0e22 | 9151 | const_rtx = GEN_INT (count); |
230d793d RS |
9152 | |
9153 | if (x && GET_CODE (XEXP (x, 0)) == SUBREG | |
9154 | && GET_MODE (XEXP (x, 0)) == shift_mode | |
9155 | && SUBREG_REG (XEXP (x, 0)) == varop) | |
9156 | varop = XEXP (x, 0); | |
9157 | else if (GET_MODE (varop) != shift_mode) | |
4de249d9 | 9158 | varop = gen_lowpart (shift_mode, varop); |
230d793d | 9159 | |
0f41302f | 9160 | /* If we can't make the SUBREG, try to return what we were given. */ |
230d793d RS |
9161 | if (GET_CODE (varop) == CLOBBER) |
9162 | return x ? x : varop; | |
9163 | ||
9164 | new = simplify_binary_operation (code, shift_mode, varop, const_rtx); | |
9165 | if (new != 0) | |
9166 | x = new; | |
9167 | else | |
6c2d03d0 | 9168 | x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx); |
230d793d | 9169 | |
224eeff2 RK |
9170 | /* If we have an outer operation and we just made a shift, it is |
9171 | possible that we could have simplified the shift were it not | |
9172 | for the outer operation. So try to do the simplification | |
9173 | recursively. */ | |
9174 | ||
f822d252 | 9175 | if (outer_op != UNKNOWN && GET_CODE (x) == code |
224eeff2 RK |
9176 | && GET_CODE (XEXP (x, 1)) == CONST_INT) |
9177 | x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0), | |
9178 | INTVAL (XEXP (x, 1))); | |
9179 | ||
e0a2f705 | 9180 | /* If we were doing an LSHIFTRT in a wider mode than it was originally, |
230d793d RS |
9181 | turn off all the bits that the shift would have turned off. */ |
9182 | if (orig_code == LSHIFTRT && result_mode != shift_mode) | |
5f4f0e22 | 9183 | x = simplify_and_const_int (NULL_RTX, shift_mode, x, |
230d793d | 9184 | GET_MODE_MASK (result_mode) >> orig_count); |
663522cb | 9185 | |
230d793d | 9186 | /* Do the remainder of the processing in RESULT_MODE. */ |
4de249d9 | 9187 | x = gen_lowpart (result_mode, x); |
230d793d RS |
9188 | |
9189 | /* If COMPLEMENT_P is set, we have to complement X before doing the outer | |
9190 | operation. */ | |
9191 | if (complement_p) | |
e869aa39 | 9192 | x = simplify_gen_unary (NOT, result_mode, x, result_mode); |
230d793d | 9193 | |
f822d252 | 9194 | if (outer_op != UNKNOWN) |
230d793d | 9195 | { |
5f4f0e22 | 9196 | if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT) |
7e4ce834 | 9197 | outer_const = trunc_int_for_mode (outer_const, result_mode); |
230d793d RS |
9198 | |
9199 | if (outer_op == AND) | |
5f4f0e22 | 9200 | x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const); |
230d793d RS |
9201 | else if (outer_op == SET) |
9202 | /* This means that we have determined that the result is | |
9203 | equivalent to a constant. This should be rare. */ | |
5f4f0e22 | 9204 | x = GEN_INT (outer_const); |
ec8e098d | 9205 | else if (GET_RTX_CLASS (outer_op) == RTX_UNARY) |
f1c6ba8b | 9206 | x = simplify_gen_unary (outer_op, result_mode, x, result_mode); |
230d793d | 9207 | else |
1999435c | 9208 | x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const)); |
230d793d RS |
9209 | } |
9210 | ||
9211 | return x; | |
663522cb | 9212 | } |
230d793d RS |
9213 | \f |
9214 | /* Like recog, but we receive the address of a pointer to a new pattern. | |
9215 | We try to match the rtx that the pointer points to. | |
9216 | If that fails, we may try to modify or replace the pattern, | |
9217 | storing the replacement into the same pointer object. | |
9218 | ||
9219 | Modifications include deletion or addition of CLOBBERs. | |
9220 | ||
9221 | PNOTES is a pointer to a location where any REG_UNUSED notes added for | |
9222 | the CLOBBERs are placed. | |
9223 | ||
9224 | The value is the final insn code from the pattern ultimately matched, | |
9225 | or -1. */ | |
9226 | ||
9227 | static int | |
79a490a9 | 9228 | recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes) |
230d793d | 9229 | { |
b3694847 | 9230 | rtx pat = *pnewpat; |
230d793d RS |
9231 | int insn_code_number; |
9232 | int num_clobbers_to_add = 0; | |
9233 | int i; | |
9234 | rtx notes = 0; | |
e6d83128 | 9235 | rtx old_notes, old_pat; |
230d793d | 9236 | |
974f4146 RK |
9237 | /* If PAT is a PARALLEL, check to see if it contains the CLOBBER |
9238 | we use to indicate that something didn't match. If we find such a | |
9239 | thing, force rejection. */ | |
d96023cf | 9240 | if (GET_CODE (pat) == PARALLEL) |
974f4146 | 9241 | for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) |
d96023cf RK |
9242 | if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER |
9243 | && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx) | |
974f4146 RK |
9244 | return -1; |
9245 | ||
e6d83128 JH |
9246 | old_pat = PATTERN (insn); |
9247 | old_notes = REG_NOTES (insn); | |
9248 | PATTERN (insn) = pat; | |
9249 | REG_NOTES (insn) = 0; | |
c1194d74 | 9250 | |
e6d83128 | 9251 | insn_code_number = recog (pat, insn, &num_clobbers_to_add); |
230d793d RS |
9252 | |
9253 | /* If it isn't, there is the possibility that we previously had an insn | |
9254 | that clobbered some register as a side effect, but the combined | |
9255 | insn doesn't need to do that. So try once more without the clobbers | |
9256 | unless this represents an ASM insn. */ | |
9257 | ||
9258 | if (insn_code_number < 0 && ! check_asm_operands (pat) | |
9259 | && GET_CODE (pat) == PARALLEL) | |
9260 | { | |
9261 | int pos; | |
9262 | ||
9263 | for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++) | |
9264 | if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER) | |
9265 | { | |
9266 | if (i != pos) | |
9267 | SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i)); | |
9268 | pos++; | |
9269 | } | |
9270 | ||
9271 | SUBST_INT (XVECLEN (pat, 0), pos); | |
9272 | ||
9273 | if (pos == 1) | |
9274 | pat = XVECEXP (pat, 0, 0); | |
9275 | ||
e6d83128 JH |
9276 | PATTERN (insn) = pat; |
9277 | insn_code_number = recog (pat, insn, &num_clobbers_to_add); | |
230d793d | 9278 | } |
e6d83128 JH |
9279 | PATTERN (insn) = old_pat; |
9280 | REG_NOTES (insn) = old_notes; | |
230d793d | 9281 | |
b5832b43 JH |
9282 | /* Recognize all noop sets, these will be killed by followup pass. */ |
9283 | if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat)) | |
9284 | insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0; | |
9285 | ||
230d793d RS |
9286 | /* If we had any clobbers to add, make a new pattern than contains |
9287 | them. Then check to make sure that all of them are dead. */ | |
9288 | if (num_clobbers_to_add) | |
9289 | { | |
38a448ca | 9290 | rtx newpat = gen_rtx_PARALLEL (VOIDmode, |
bf103ec2 R |
9291 | rtvec_alloc (GET_CODE (pat) == PARALLEL |
9292 | ? (XVECLEN (pat, 0) | |
9293 | + num_clobbers_to_add) | |
9294 | : num_clobbers_to_add + 1)); | |
230d793d RS |
9295 | |
9296 | if (GET_CODE (pat) == PARALLEL) | |
9297 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
9298 | XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i); | |
9299 | else | |
9300 | XVECEXP (newpat, 0, 0) = pat; | |
9301 | ||
9302 | add_clobbers (newpat, insn_code_number); | |
9303 | ||
9304 | for (i = XVECLEN (newpat, 0) - num_clobbers_to_add; | |
9305 | i < XVECLEN (newpat, 0); i++) | |
9306 | { | |
f8cfc6aa | 9307 | if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)) |
230d793d RS |
9308 | && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn)) |
9309 | return -1; | |
38a448ca RH |
9310 | notes = gen_rtx_EXPR_LIST (REG_UNUSED, |
9311 | XEXP (XVECEXP (newpat, 0, i), 0), notes); | |
230d793d RS |
9312 | } |
9313 | pat = newpat; | |
9314 | } | |
9315 | ||
9316 | *pnewpat = pat; | |
9317 | *pnotes = notes; | |
9318 | ||
9319 | return insn_code_number; | |
9320 | } | |
9321 | \f | |
4de249d9 PB |
9322 | /* Like gen_lowpart_general but for use by combine. In combine it |
9323 | is not possible to create any new pseudoregs. However, it is | |
9324 | safe to create invalid memory addresses, because combine will | |
9325 | try to recognize them and all they will do is make the combine | |
9326 | attempt fail. | |
230d793d RS |
9327 | |
9328 | If for some reason this cannot do its job, an rtx | |
9329 | (clobber (const_int 0)) is returned. | |
9330 | An insn containing that will not be recognized. */ | |
9331 | ||
230d793d | 9332 | static rtx |
7a32a925 | 9333 | gen_lowpart_for_combine (enum machine_mode omode, rtx x) |
230d793d | 9334 | { |
7a32a925 RH |
9335 | enum machine_mode imode = GET_MODE (x); |
9336 | unsigned int osize = GET_MODE_SIZE (omode); | |
9337 | unsigned int isize = GET_MODE_SIZE (imode); | |
230d793d RS |
9338 | rtx result; |
9339 | ||
7a32a925 | 9340 | if (omode == imode) |
230d793d RS |
9341 | return x; |
9342 | ||
7a32a925 RH |
9343 | /* Return identity if this is a CONST or symbolic reference. */ |
9344 | if (omode == Pmode | |
cafe096b EC |
9345 | && (GET_CODE (x) == CONST |
9346 | || GET_CODE (x) == SYMBOL_REF | |
9347 | || GET_CODE (x) == LABEL_REF)) | |
9348 | return x; | |
9349 | ||
eae957a8 RK |
9350 | /* We can only support MODE being wider than a word if X is a |
9351 | constant integer or has a mode the same size. */ | |
7a32a925 RH |
9352 | if (GET_MODE_SIZE (omode) > UNITS_PER_WORD |
9353 | && ! ((imode == VOIDmode | |
eae957a8 RK |
9354 | && (GET_CODE (x) == CONST_INT |
9355 | || GET_CODE (x) == CONST_DOUBLE)) | |
7a32a925 RH |
9356 | || isize == osize)) |
9357 | goto fail; | |
230d793d RS |
9358 | |
9359 | /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart | |
9360 | won't know what to do. So we will strip off the SUBREG here and | |
9361 | process normally. */ | |
3c0cb5de | 9362 | if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x))) |
230d793d RS |
9363 | { |
9364 | x = SUBREG_REG (x); | |
32f2ce02 HPN |
9365 | |
9366 | /* For use in case we fall down into the address adjustments | |
9367 | further below, we need to adjust the known mode and size of | |
9368 | x; imode and isize, since we just adjusted x. */ | |
9369 | imode = GET_MODE (x); | |
9370 | ||
9371 | if (imode == omode) | |
230d793d | 9372 | return x; |
32f2ce02 HPN |
9373 | |
9374 | isize = GET_MODE_SIZE (imode); | |
230d793d RS |
9375 | } |
9376 | ||
7a32a925 RH |
9377 | result = gen_lowpart_common (omode, x); |
9378 | ||
cff9f8d5 | 9379 | #ifdef CANNOT_CHANGE_MODE_CLASS |
41bf2a8b RH |
9380 | if (result != 0 && GET_CODE (result) == SUBREG) |
9381 | record_subregs_of_mode (result); | |
02188693 | 9382 | #endif |
64bf47a2 | 9383 | |
230d793d RS |
9384 | if (result) |
9385 | return result; | |
9386 | ||
3c0cb5de | 9387 | if (MEM_P (x)) |
230d793d | 9388 | { |
b3694847 | 9389 | int offset = 0; |
230d793d RS |
9390 | |
9391 | /* Refuse to work on a volatile memory ref or one with a mode-dependent | |
9392 | address. */ | |
9393 | if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0))) | |
7a32a925 | 9394 | goto fail; |
230d793d RS |
9395 | |
9396 | /* If we want to refer to something bigger than the original memref, | |
9a5a17f3 | 9397 | generate a paradoxical subreg instead. That will force a reload |
230d793d | 9398 | of the original memref X. */ |
7a32a925 RH |
9399 | if (isize < osize) |
9400 | return gen_rtx_SUBREG (omode, x, 0); | |
230d793d | 9401 | |
f76b9db2 | 9402 | if (WORDS_BIG_ENDIAN) |
7a32a925 | 9403 | offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD); |
c5c76735 | 9404 | |
7a32a925 | 9405 | /* Adjust the address so that the address-after-the-data is unchanged. */ |
f76b9db2 | 9406 | if (BYTES_BIG_ENDIAN) |
7a32a925 | 9407 | offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize); |
f1ec5147 | 9408 | |
7a32a925 | 9409 | return adjust_address_nv (x, omode, offset); |
230d793d RS |
9410 | } |
9411 | ||
9412 | /* If X is a comparison operator, rewrite it in a new mode. This | |
9413 | probably won't match, but may allow further simplifications. */ | |
ec8e098d | 9414 | else if (COMPARISON_P (x)) |
7a32a925 | 9415 | return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1)); |
230d793d RS |
9416 | |
9417 | /* If we couldn't simplify X any other way, just enclose it in a | |
9418 | SUBREG. Normally, this SUBREG won't match, but some patterns may | |
a7c99304 | 9419 | include an explicit SUBREG or we may simplify it further in combine. */ |
230d793d | 9420 | else |
dfbe1b2f | 9421 | { |
ddef6bc7 | 9422 | int offset = 0; |
e0e08ac2 | 9423 | rtx res; |
dfbe1b2f | 9424 | |
7a32a925 RH |
9425 | offset = subreg_lowpart_offset (omode, imode); |
9426 | if (imode == VOIDmode) | |
80ba02b1 | 9427 | { |
7a32a925 RH |
9428 | imode = int_mode_for_mode (omode); |
9429 | x = gen_lowpart_common (imode, x); | |
9430 | if (x == NULL) | |
9431 | goto fail; | |
80ba02b1 | 9432 | } |
7a32a925 | 9433 | res = simplify_gen_subreg (omode, x, imode, offset); |
e0e08ac2 JH |
9434 | if (res) |
9435 | return res; | |
dfbe1b2f | 9436 | } |
7a32a925 RH |
9437 | |
9438 | fail: | |
9439 | return gen_rtx_CLOBBER (imode, const0_rtx); | |
230d793d RS |
9440 | } |
9441 | \f | |
1999435c PB |
9442 | /* These routines make binary and unary operations by first seeing if they |
9443 | fold; if not, a new expression is allocated. */ | |
9444 | ||
9445 | static rtx | |
9446 | gen_binary (enum rtx_code code, enum machine_mode mode, rtx op0, rtx op1) | |
9447 | { | |
9448 | rtx result; | |
9449 | rtx tem; | |
9450 | ||
9451 | if (GET_CODE (op0) == CLOBBER) | |
9452 | return op0; | |
9453 | else if (GET_CODE (op1) == CLOBBER) | |
9454 | return op1; | |
9455 | ||
9456 | if (GET_RTX_CLASS (code) == RTX_COMM_ARITH | |
9457 | && swap_commutative_operands_p (op0, op1)) | |
9458 | tem = op0, op0 = op1, op1 = tem; | |
9459 | ||
9460 | if (GET_RTX_CLASS (code) == RTX_COMPARE | |
9461 | || GET_RTX_CLASS (code) == RTX_COMM_COMPARE) | |
9462 | { | |
9463 | enum machine_mode op_mode = GET_MODE (op0); | |
9464 | ||
9465 | /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get | |
9466 | just (REL_OP X Y). */ | |
9467 | if (GET_CODE (op0) == COMPARE && op1 == const0_rtx) | |
9468 | { | |
9469 | op1 = XEXP (op0, 1); | |
9470 | op0 = XEXP (op0, 0); | |
9471 | op_mode = GET_MODE (op0); | |
9472 | } | |
9473 | ||
9474 | if (op_mode == VOIDmode) | |
9475 | op_mode = GET_MODE (op1); | |
9476 | result = simplify_relational_operation (code, mode, op_mode, op0, op1); | |
9477 | } | |
9478 | else | |
9479 | result = simplify_binary_operation (code, mode, op0, op1); | |
9480 | ||
9481 | if (result) | |
9482 | return result; | |
9483 | ||
9484 | /* Put complex operands first and constants second. */ | |
9485 | if (GET_RTX_CLASS (code) == RTX_COMM_ARITH | |
9486 | && swap_commutative_operands_p (op0, op1)) | |
9487 | return gen_rtx_fmt_ee (code, mode, op1, op0); | |
9488 | ||
9489 | /* If we are turning off bits already known off in OP0, we need not do | |
9490 | an AND. */ | |
9491 | else if (code == AND && GET_CODE (op1) == CONST_INT | |
9492 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
9493 | && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0) | |
9494 | return op0; | |
9495 | ||
9496 | return gen_rtx_fmt_ee (code, mode, op0, op1); | |
9497 | } | |
9498 | \f | |
230d793d RS |
9499 | /* Simplify a comparison between *POP0 and *POP1 where CODE is the |
9500 | comparison code that will be tested. | |
9501 | ||
9502 | The result is a possibly different comparison code to use. *POP0 and | |
9503 | *POP1 may be updated. | |
9504 | ||
9505 | It is possible that we might detect that a comparison is either always | |
9506 | true or always false. However, we do not perform general constant | |
5089e22e | 9507 | folding in combine, so this knowledge isn't useful. Such tautologies |
230d793d RS |
9508 | should have been detected earlier. Hence we ignore all such cases. */ |
9509 | ||
9510 | static enum rtx_code | |
79a490a9 | 9511 | simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1) |
230d793d RS |
9512 | { |
9513 | rtx op0 = *pop0; | |
9514 | rtx op1 = *pop1; | |
9515 | rtx tem, tem1; | |
9516 | int i; | |
9517 | enum machine_mode mode, tmode; | |
9518 | ||
9519 | /* Try a few ways of applying the same transformation to both operands. */ | |
9520 | while (1) | |
9521 | { | |
3a19aabc RK |
9522 | #ifndef WORD_REGISTER_OPERATIONS |
9523 | /* The test below this one won't handle SIGN_EXTENDs on these machines, | |
9524 | so check specially. */ | |
9525 | if (code != GTU && code != GEU && code != LTU && code != LEU | |
9526 | && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT | |
9527 | && GET_CODE (XEXP (op0, 0)) == ASHIFT | |
9528 | && GET_CODE (XEXP (op1, 0)) == ASHIFT | |
9529 | && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG | |
9530 | && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG | |
9531 | && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))) | |
ad25ba17 | 9532 | == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))) |
3a19aabc | 9533 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
fa9ea255 KH |
9534 | && XEXP (op0, 1) == XEXP (op1, 1) |
9535 | && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) | |
9536 | && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1) | |
3a19aabc RK |
9537 | && (INTVAL (XEXP (op0, 1)) |
9538 | == (GET_MODE_BITSIZE (GET_MODE (op0)) | |
9539 | - (GET_MODE_BITSIZE | |
9540 | (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))))))) | |
9541 | { | |
9542 | op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0)); | |
9543 | op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0)); | |
9544 | } | |
9545 | #endif | |
9546 | ||
230d793d RS |
9547 | /* If both operands are the same constant shift, see if we can ignore the |
9548 | shift. We can if the shift is a rotate or if the bits shifted out of | |
951553af | 9549 | this shift are known to be zero for both inputs and if the type of |
230d793d | 9550 | comparison is compatible with the shift. */ |
67232b23 RK |
9551 | if (GET_CODE (op0) == GET_CODE (op1) |
9552 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT | |
9553 | && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ)) | |
45620ed4 | 9554 | || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT) |
67232b23 RK |
9555 | && (code != GT && code != LT && code != GE && code != LE)) |
9556 | || (GET_CODE (op0) == ASHIFTRT | |
9557 | && (code != GTU && code != LTU | |
99dc5306 | 9558 | && code != GEU && code != LEU))) |
67232b23 RK |
9559 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
9560 | && INTVAL (XEXP (op0, 1)) >= 0 | |
9561 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT | |
9562 | && XEXP (op0, 1) == XEXP (op1, 1)) | |
230d793d RS |
9563 | { |
9564 | enum machine_mode mode = GET_MODE (op0); | |
5f4f0e22 | 9565 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
9566 | int shift_count = INTVAL (XEXP (op0, 1)); |
9567 | ||
9568 | if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT) | |
9569 | mask &= (mask >> shift_count) << shift_count; | |
45620ed4 | 9570 | else if (GET_CODE (op0) == ASHIFT) |
230d793d RS |
9571 | mask = (mask & (mask << shift_count)) >> shift_count; |
9572 | ||
663522cb KH |
9573 | if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0 |
9574 | && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0) | |
230d793d RS |
9575 | op0 = XEXP (op0, 0), op1 = XEXP (op1, 0); |
9576 | else | |
9577 | break; | |
9578 | } | |
9579 | ||
9580 | /* If both operands are AND's of a paradoxical SUBREG by constant, the | |
9581 | SUBREGs are of the same mode, and, in both cases, the AND would | |
9582 | be redundant if the comparison was done in the narrower mode, | |
9583 | do the comparison in the narrower mode (e.g., we are AND'ing with 1 | |
951553af RK |
9584 | and the operand's possibly nonzero bits are 0xffffff01; in that case |
9585 | if we only care about QImode, we don't need the AND). This case | |
9586 | occurs if the output mode of an scc insn is not SImode and | |
7e4dc511 RK |
9587 | STORE_FLAG_VALUE == 1 (e.g., the 386). |
9588 | ||
9589 | Similarly, check for a case where the AND's are ZERO_EXTEND | |
9590 | operations from some narrower mode even though a SUBREG is not | |
9591 | present. */ | |
230d793d | 9592 | |
663522cb KH |
9593 | else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND |
9594 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9595 | && GET_CODE (XEXP (op1, 1)) == CONST_INT) | |
230d793d | 9596 | { |
7e4dc511 RK |
9597 | rtx inner_op0 = XEXP (op0, 0); |
9598 | rtx inner_op1 = XEXP (op1, 0); | |
9599 | HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1)); | |
9600 | HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1)); | |
9601 | int changed = 0; | |
663522cb | 9602 | |
7e4dc511 RK |
9603 | if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG |
9604 | && (GET_MODE_SIZE (GET_MODE (inner_op0)) | |
9605 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0)))) | |
9606 | && (GET_MODE (SUBREG_REG (inner_op0)) | |
9607 | == GET_MODE (SUBREG_REG (inner_op1))) | |
729a2bc6 | 9608 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0))) |
7e4dc511 | 9609 | <= HOST_BITS_PER_WIDE_INT) |
01c82bbb | 9610 | && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0), |
729a2bc6 | 9611 | GET_MODE (SUBREG_REG (inner_op0))))) |
01c82bbb RK |
9612 | && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1), |
9613 | GET_MODE (SUBREG_REG (inner_op1)))))) | |
7e4dc511 RK |
9614 | { |
9615 | op0 = SUBREG_REG (inner_op0); | |
9616 | op1 = SUBREG_REG (inner_op1); | |
9617 | ||
9618 | /* The resulting comparison is always unsigned since we masked | |
0f41302f | 9619 | off the original sign bit. */ |
7e4dc511 RK |
9620 | code = unsigned_condition (code); |
9621 | ||
9622 | changed = 1; | |
9623 | } | |
230d793d | 9624 | |
7e4dc511 RK |
9625 | else if (c0 == c1) |
9626 | for (tmode = GET_CLASS_NARROWEST_MODE | |
9627 | (GET_MODE_CLASS (GET_MODE (op0))); | |
9628 | tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode)) | |
e51712db | 9629 | if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode)) |
7e4dc511 | 9630 | { |
4de249d9 PB |
9631 | op0 = gen_lowpart (tmode, inner_op0); |
9632 | op1 = gen_lowpart (tmode, inner_op1); | |
66415c8b | 9633 | code = unsigned_condition (code); |
7e4dc511 RK |
9634 | changed = 1; |
9635 | break; | |
9636 | } | |
9637 | ||
9638 | if (! changed) | |
9639 | break; | |
230d793d | 9640 | } |
3a19aabc | 9641 | |
ad25ba17 RK |
9642 | /* If both operands are NOT, we can strip off the outer operation |
9643 | and adjust the comparison code for swapped operands; similarly for | |
9644 | NEG, except that this must be an equality comparison. */ | |
9645 | else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT) | |
9646 | || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG | |
9647 | && (code == EQ || code == NE))) | |
9648 | op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code); | |
3a19aabc | 9649 | |
230d793d RS |
9650 | else |
9651 | break; | |
9652 | } | |
663522cb | 9653 | |
230d793d | 9654 | /* If the first operand is a constant, swap the operands and adjust the |
3aceff0d RK |
9655 | comparison code appropriately, but don't do this if the second operand |
9656 | is already a constant integer. */ | |
8c9864f3 | 9657 | if (swap_commutative_operands_p (op0, op1)) |
230d793d RS |
9658 | { |
9659 | tem = op0, op0 = op1, op1 = tem; | |
9660 | code = swap_condition (code); | |
9661 | } | |
9662 | ||
9663 | /* We now enter a loop during which we will try to simplify the comparison. | |
9664 | For the most part, we only are concerned with comparisons with zero, | |
9665 | but some things may really be comparisons with zero but not start | |
9666 | out looking that way. */ | |
9667 | ||
9668 | while (GET_CODE (op1) == CONST_INT) | |
9669 | { | |
9670 | enum machine_mode mode = GET_MODE (op0); | |
770ae6cc | 9671 | unsigned int mode_width = GET_MODE_BITSIZE (mode); |
5f4f0e22 | 9672 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
9673 | int equality_comparison_p; |
9674 | int sign_bit_comparison_p; | |
9675 | int unsigned_comparison_p; | |
5f4f0e22 | 9676 | HOST_WIDE_INT const_op; |
230d793d RS |
9677 | |
9678 | /* We only want to handle integral modes. This catches VOIDmode, | |
9679 | CCmode, and the floating-point modes. An exception is that we | |
9680 | can handle VOIDmode if OP0 is a COMPARE or a comparison | |
9681 | operation. */ | |
9682 | ||
9683 | if (GET_MODE_CLASS (mode) != MODE_INT | |
9684 | && ! (mode == VOIDmode | |
ec8e098d | 9685 | && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0)))) |
230d793d RS |
9686 | break; |
9687 | ||
9688 | /* Get the constant we are comparing against and turn off all bits | |
9689 | not on in our mode. */ | |
71012d97 GK |
9690 | const_op = INTVAL (op1); |
9691 | if (mode != VOIDmode) | |
9692 | const_op = trunc_int_for_mode (const_op, mode); | |
b4fbaca7 | 9693 | op1 = GEN_INT (const_op); |
230d793d RS |
9694 | |
9695 | /* If we are comparing against a constant power of two and the value | |
951553af | 9696 | being compared can only have that single bit nonzero (e.g., it was |
230d793d RS |
9697 | `and'ed with that bit), we can replace this with a comparison |
9698 | with zero. */ | |
9699 | if (const_op | |
9700 | && (code == EQ || code == NE || code == GE || code == GEU | |
9701 | || code == LT || code == LTU) | |
5f4f0e22 | 9702 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d | 9703 | && exact_log2 (const_op) >= 0 |
e51712db | 9704 | && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op) |
230d793d RS |
9705 | { |
9706 | code = (code == EQ || code == GE || code == GEU ? NE : EQ); | |
9707 | op1 = const0_rtx, const_op = 0; | |
9708 | } | |
9709 | ||
d0ab8cd3 RK |
9710 | /* Similarly, if we are comparing a value known to be either -1 or |
9711 | 0 with -1, change it to the opposite comparison against zero. */ | |
9712 | ||
9713 | if (const_op == -1 | |
9714 | && (code == EQ || code == NE || code == GT || code == LE | |
9715 | || code == GEU || code == LTU) | |
9716 | && num_sign_bit_copies (op0, mode) == mode_width) | |
9717 | { | |
9718 | code = (code == EQ || code == LE || code == GEU ? NE : EQ); | |
9719 | op1 = const0_rtx, const_op = 0; | |
9720 | } | |
9721 | ||
230d793d | 9722 | /* Do some canonicalizations based on the comparison code. We prefer |
663522cb | 9723 | comparisons against zero and then prefer equality comparisons. |
4803a34a | 9724 | If we can reduce the size of a constant, we will do that too. */ |
230d793d RS |
9725 | |
9726 | switch (code) | |
9727 | { | |
9728 | case LT: | |
4803a34a RK |
9729 | /* < C is equivalent to <= (C - 1) */ |
9730 | if (const_op > 0) | |
230d793d | 9731 | { |
4803a34a | 9732 | const_op -= 1; |
5f4f0e22 | 9733 | op1 = GEN_INT (const_op); |
230d793d RS |
9734 | code = LE; |
9735 | /* ... fall through to LE case below. */ | |
9736 | } | |
9737 | else | |
9738 | break; | |
9739 | ||
9740 | case LE: | |
4803a34a RK |
9741 | /* <= C is equivalent to < (C + 1); we do this for C < 0 */ |
9742 | if (const_op < 0) | |
9743 | { | |
9744 | const_op += 1; | |
5f4f0e22 | 9745 | op1 = GEN_INT (const_op); |
4803a34a RK |
9746 | code = LT; |
9747 | } | |
230d793d RS |
9748 | |
9749 | /* If we are doing a <= 0 comparison on a value known to have | |
9750 | a zero sign bit, we can replace this with == 0. */ | |
9751 | else if (const_op == 0 | |
5f4f0e22 | 9752 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 9753 | && (nonzero_bits (op0, mode) |
5f4f0e22 | 9754 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) |
230d793d RS |
9755 | code = EQ; |
9756 | break; | |
9757 | ||
9758 | case GE: | |
0f41302f | 9759 | /* >= C is equivalent to > (C - 1). */ |
4803a34a | 9760 | if (const_op > 0) |
230d793d | 9761 | { |
4803a34a | 9762 | const_op -= 1; |
5f4f0e22 | 9763 | op1 = GEN_INT (const_op); |
230d793d RS |
9764 | code = GT; |
9765 | /* ... fall through to GT below. */ | |
9766 | } | |
9767 | else | |
9768 | break; | |
9769 | ||
9770 | case GT: | |
663522cb | 9771 | /* > C is equivalent to >= (C + 1); we do this for C < 0. */ |
4803a34a RK |
9772 | if (const_op < 0) |
9773 | { | |
9774 | const_op += 1; | |
5f4f0e22 | 9775 | op1 = GEN_INT (const_op); |
4803a34a RK |
9776 | code = GE; |
9777 | } | |
230d793d RS |
9778 | |
9779 | /* If we are doing a > 0 comparison on a value known to have | |
9780 | a zero sign bit, we can replace this with != 0. */ | |
9781 | else if (const_op == 0 | |
5f4f0e22 | 9782 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 9783 | && (nonzero_bits (op0, mode) |
5f4f0e22 | 9784 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) |
230d793d RS |
9785 | code = NE; |
9786 | break; | |
9787 | ||
230d793d | 9788 | case LTU: |
4803a34a RK |
9789 | /* < C is equivalent to <= (C - 1). */ |
9790 | if (const_op > 0) | |
9791 | { | |
9792 | const_op -= 1; | |
5f4f0e22 | 9793 | op1 = GEN_INT (const_op); |
4803a34a | 9794 | code = LEU; |
0f41302f | 9795 | /* ... fall through ... */ |
4803a34a | 9796 | } |
d0ab8cd3 RK |
9797 | |
9798 | /* (unsigned) < 0x80000000 is equivalent to >= 0. */ | |
f77aada2 JW |
9799 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
9800 | && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))) | |
d0ab8cd3 RK |
9801 | { |
9802 | const_op = 0, op1 = const0_rtx; | |
9803 | code = GE; | |
9804 | break; | |
9805 | } | |
4803a34a RK |
9806 | else |
9807 | break; | |
230d793d RS |
9808 | |
9809 | case LEU: | |
9810 | /* unsigned <= 0 is equivalent to == 0 */ | |
9811 | if (const_op == 0) | |
9812 | code = EQ; | |
d0ab8cd3 | 9813 | |
0f41302f | 9814 | /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */ |
f77aada2 JW |
9815 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
9816 | && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)) | |
d0ab8cd3 RK |
9817 | { |
9818 | const_op = 0, op1 = const0_rtx; | |
9819 | code = GE; | |
9820 | } | |
230d793d RS |
9821 | break; |
9822 | ||
4803a34a | 9823 | case GEU: |
b8ff6ca0 | 9824 | /* >= C is equivalent to > (C - 1). */ |
4803a34a RK |
9825 | if (const_op > 1) |
9826 | { | |
9827 | const_op -= 1; | |
5f4f0e22 | 9828 | op1 = GEN_INT (const_op); |
4803a34a | 9829 | code = GTU; |
0f41302f | 9830 | /* ... fall through ... */ |
4803a34a | 9831 | } |
d0ab8cd3 RK |
9832 | |
9833 | /* (unsigned) >= 0x80000000 is equivalent to < 0. */ | |
f77aada2 JW |
9834 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
9835 | && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))) | |
d0ab8cd3 RK |
9836 | { |
9837 | const_op = 0, op1 = const0_rtx; | |
9838 | code = LT; | |
8b2e69e1 | 9839 | break; |
d0ab8cd3 | 9840 | } |
4803a34a RK |
9841 | else |
9842 | break; | |
9843 | ||
230d793d RS |
9844 | case GTU: |
9845 | /* unsigned > 0 is equivalent to != 0 */ | |
9846 | if (const_op == 0) | |
9847 | code = NE; | |
d0ab8cd3 RK |
9848 | |
9849 | /* (unsigned) > 0x7fffffff is equivalent to < 0. */ | |
f77aada2 | 9850 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
e869aa39 | 9851 | && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)) |
d0ab8cd3 RK |
9852 | { |
9853 | const_op = 0, op1 = const0_rtx; | |
9854 | code = LT; | |
9855 | } | |
230d793d | 9856 | break; |
e9a25f70 JL |
9857 | |
9858 | default: | |
9859 | break; | |
230d793d RS |
9860 | } |
9861 | ||
9862 | /* Compute some predicates to simplify code below. */ | |
9863 | ||
9864 | equality_comparison_p = (code == EQ || code == NE); | |
9865 | sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0); | |
9866 | unsigned_comparison_p = (code == LTU || code == LEU || code == GTU | |
d5010e66 | 9867 | || code == GEU); |
230d793d | 9868 | |
6139ff20 RK |
9869 | /* If this is a sign bit comparison and we can do arithmetic in |
9870 | MODE, say that we will only be needing the sign bit of OP0. */ | |
9871 | if (sign_bit_comparison_p | |
9872 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
9873 | op0 = force_to_mode (op0, mode, | |
9874 | ((HOST_WIDE_INT) 1 | |
9875 | << (GET_MODE_BITSIZE (mode) - 1)), | |
e3d616e3 | 9876 | NULL_RTX, 0); |
6139ff20 | 9877 | |
230d793d RS |
9878 | /* Now try cases based on the opcode of OP0. If none of the cases |
9879 | does a "continue", we exit this loop immediately after the | |
9880 | switch. */ | |
9881 | ||
9882 | switch (GET_CODE (op0)) | |
9883 | { | |
9884 | case ZERO_EXTRACT: | |
9885 | /* If we are extracting a single bit from a variable position in | |
9886 | a constant that has only a single bit set and are comparing it | |
663522cb | 9887 | with zero, we can convert this into an equality comparison |
d7cd794f | 9888 | between the position and the location of the single bit. */ |
a475bff7 RH |
9889 | /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might |
9890 | have already reduced the shift count modulo the word size. */ | |
9891 | if (!SHIFT_COUNT_TRUNCATED | |
9892 | && GET_CODE (XEXP (op0, 0)) == CONST_INT | |
230d793d RS |
9893 | && XEXP (op0, 1) == const1_rtx |
9894 | && equality_comparison_p && const_op == 0 | |
d7cd794f | 9895 | && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0) |
230d793d | 9896 | { |
f76b9db2 | 9897 | if (BITS_BIG_ENDIAN) |
0d8e55d8 | 9898 | { |
da920570 ZW |
9899 | enum machine_mode new_mode |
9900 | = mode_for_extraction (EP_extzv, 1); | |
9901 | if (new_mode == MAX_MACHINE_MODE) | |
9902 | i = BITS_PER_WORD - 1 - i; | |
9903 | else | |
9904 | { | |
9905 | mode = new_mode; | |
9906 | i = (GET_MODE_BITSIZE (mode) - 1 - i); | |
9907 | } | |
0d8e55d8 | 9908 | } |
230d793d RS |
9909 | |
9910 | op0 = XEXP (op0, 2); | |
5f4f0e22 | 9911 | op1 = GEN_INT (i); |
230d793d RS |
9912 | const_op = i; |
9913 | ||
9914 | /* Result is nonzero iff shift count is equal to I. */ | |
9915 | code = reverse_condition (code); | |
9916 | continue; | |
9917 | } | |
230d793d | 9918 | |
0f41302f | 9919 | /* ... fall through ... */ |
230d793d RS |
9920 | |
9921 | case SIGN_EXTRACT: | |
9922 | tem = expand_compound_operation (op0); | |
9923 | if (tem != op0) | |
9924 | { | |
9925 | op0 = tem; | |
9926 | continue; | |
9927 | } | |
9928 | break; | |
9929 | ||
9930 | case NOT: | |
9931 | /* If testing for equality, we can take the NOT of the constant. */ | |
9932 | if (equality_comparison_p | |
9933 | && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0) | |
9934 | { | |
9935 | op0 = XEXP (op0, 0); | |
9936 | op1 = tem; | |
9937 | continue; | |
9938 | } | |
9939 | ||
9940 | /* If just looking at the sign bit, reverse the sense of the | |
9941 | comparison. */ | |
9942 | if (sign_bit_comparison_p) | |
9943 | { | |
9944 | op0 = XEXP (op0, 0); | |
9945 | code = (code == GE ? LT : GE); | |
9946 | continue; | |
9947 | } | |
9948 | break; | |
9949 | ||
9950 | case NEG: | |
9951 | /* If testing for equality, we can take the NEG of the constant. */ | |
9952 | if (equality_comparison_p | |
9953 | && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0) | |
9954 | { | |
9955 | op0 = XEXP (op0, 0); | |
9956 | op1 = tem; | |
9957 | continue; | |
9958 | } | |
9959 | ||
9960 | /* The remaining cases only apply to comparisons with zero. */ | |
9961 | if (const_op != 0) | |
9962 | break; | |
9963 | ||
9964 | /* When X is ABS or is known positive, | |
9965 | (neg X) is < 0 if and only if X != 0. */ | |
9966 | ||
9967 | if (sign_bit_comparison_p | |
9968 | && (GET_CODE (XEXP (op0, 0)) == ABS | |
5f4f0e22 | 9969 | || (mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 9970 | && (nonzero_bits (XEXP (op0, 0), mode) |
5f4f0e22 | 9971 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0))) |
230d793d RS |
9972 | { |
9973 | op0 = XEXP (op0, 0); | |
9974 | code = (code == LT ? NE : EQ); | |
9975 | continue; | |
9976 | } | |
9977 | ||
3bed8141 | 9978 | /* If we have NEG of something whose two high-order bits are the |
0f41302f | 9979 | same, we know that "(-a) < 0" is equivalent to "a > 0". */ |
3bed8141 | 9980 | if (num_sign_bit_copies (op0, mode) >= 2) |
230d793d RS |
9981 | { |
9982 | op0 = XEXP (op0, 0); | |
9983 | code = swap_condition (code); | |
9984 | continue; | |
9985 | } | |
9986 | break; | |
9987 | ||
9988 | case ROTATE: | |
9989 | /* If we are testing equality and our count is a constant, we | |
9990 | can perform the inverse operation on our RHS. */ | |
9991 | if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
9992 | && (tem = simplify_binary_operation (ROTATERT, mode, | |
9993 | op1, XEXP (op0, 1))) != 0) | |
9994 | { | |
9995 | op0 = XEXP (op0, 0); | |
9996 | op1 = tem; | |
9997 | continue; | |
9998 | } | |
9999 | ||
10000 | /* If we are doing a < 0 or >= 0 comparison, it means we are testing | |
10001 | a particular bit. Convert it to an AND of a constant of that | |
10002 | bit. This will be converted into a ZERO_EXTRACT. */ | |
10003 | if (const_op == 0 && sign_bit_comparison_p | |
10004 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5f4f0e22 | 10005 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
230d793d | 10006 | { |
5f4f0e22 CH |
10007 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10008 | ((HOST_WIDE_INT) 1 | |
10009 | << (mode_width - 1 | |
10010 | - INTVAL (XEXP (op0, 1))))); | |
230d793d RS |
10011 | code = (code == LT ? NE : EQ); |
10012 | continue; | |
10013 | } | |
10014 | ||
663522cb | 10015 | /* Fall through. */ |
230d793d RS |
10016 | |
10017 | case ABS: | |
10018 | /* ABS is ignorable inside an equality comparison with zero. */ | |
10019 | if (const_op == 0 && equality_comparison_p) | |
10020 | { | |
10021 | op0 = XEXP (op0, 0); | |
10022 | continue; | |
10023 | } | |
10024 | break; | |
230d793d RS |
10025 | |
10026 | case SIGN_EXTEND: | |
10027 | /* Can simplify (compare (zero/sign_extend FOO) CONST) | |
663522cb | 10028 | to (compare FOO CONST) if CONST fits in FOO's mode and we |
230d793d RS |
10029 | are either testing inequality or have an unsigned comparison |
10030 | with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */ | |
10031 | if (! unsigned_comparison_p | |
10032 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
5f4f0e22 CH |
10033 | <= HOST_BITS_PER_WIDE_INT) |
10034 | && ((unsigned HOST_WIDE_INT) const_op | |
e51712db | 10035 | < (((unsigned HOST_WIDE_INT) 1 |
5f4f0e22 | 10036 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1))))) |
230d793d RS |
10037 | { |
10038 | op0 = XEXP (op0, 0); | |
10039 | continue; | |
10040 | } | |
10041 | break; | |
10042 | ||
10043 | case SUBREG: | |
f917ae96 EB |
10044 | /* Check for the case where we are comparing A - C1 with C2, that is |
10045 | ||
10046 | (subreg:MODE (plus (A) (-C1))) op (C2) | |
10047 | ||
10048 | with C1 a constant, and try to lift the SUBREG, i.e. to do the | |
10049 | comparison in the wider mode. One of the following two conditions | |
10050 | must be true in order for this to be valid: | |
10051 | ||
10052 | 1. The mode extension results in the same bit pattern being added | |
10053 | on both sides and the comparison is equality or unsigned. As | |
10054 | C2 has been truncated to fit in MODE, the pattern can only be | |
10055 | all 0s or all 1s. | |
10056 | ||
10057 | 2. The mode extension results in the sign bit being copied on | |
10058 | each side. | |
10059 | ||
10060 | The difficulty here is that we have predicates for A but not for | |
10061 | (A - C1) so we need to check that C1 is within proper bounds so | |
10062 | as to perturbate A as little as possible. */ | |
a687e897 RK |
10063 | |
10064 | if (mode_width <= HOST_BITS_PER_WIDE_INT | |
10065 | && subreg_lowpart_p (op0) | |
f917ae96 | 10066 | && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width |
a687e897 | 10067 | && GET_CODE (SUBREG_REG (op0)) == PLUS |
f917ae96 | 10068 | && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT) |
a687e897 | 10069 | { |
f917ae96 EB |
10070 | enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0)); |
10071 | rtx a = XEXP (SUBREG_REG (op0), 0); | |
10072 | HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1)); | |
10073 | ||
10074 | if ((c1 > 0 | |
10075 | && (unsigned HOST_WIDE_INT) c1 | |
10076 | < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1) | |
10077 | && (equality_comparison_p || unsigned_comparison_p) | |
10078 | /* (A - C1) zero-extends if it is positive and sign-extends | |
10079 | if it is negative, C2 both zero- and sign-extends. */ | |
10080 | && ((0 == (nonzero_bits (a, inner_mode) | |
10081 | & ~GET_MODE_MASK (mode)) | |
10082 | && const_op >= 0) | |
10083 | /* (A - C1) sign-extends if it is positive and 1-extends | |
10084 | if it is negative, C2 both sign- and 1-extends. */ | |
10085 | || (num_sign_bit_copies (a, inner_mode) | |
10086 | > (unsigned int) (GET_MODE_BITSIZE (inner_mode) | |
10087 | - mode_width) | |
10088 | && const_op < 0))) | |
10089 | || ((unsigned HOST_WIDE_INT) c1 | |
10090 | < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2) | |
10091 | /* (A - C1) always sign-extends, like C2. */ | |
10092 | && num_sign_bit_copies (a, inner_mode) | |
10093 | > (unsigned int) (GET_MODE_BITSIZE (inner_mode) | |
10094 | - mode_width - 1))) | |
10095 | { | |
10096 | op0 = SUBREG_REG (op0); | |
10097 | continue; | |
10098 | } | |
a687e897 RK |
10099 | } |
10100 | ||
fe0cf571 RK |
10101 | /* If the inner mode is narrower and we are extracting the low part, |
10102 | we can treat the SUBREG as if it were a ZERO_EXTEND. */ | |
10103 | if (subreg_lowpart_p (op0) | |
89f1c7f2 RS |
10104 | && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width) |
10105 | /* Fall through */ ; | |
10106 | else | |
230d793d RS |
10107 | break; |
10108 | ||
0f41302f | 10109 | /* ... fall through ... */ |
230d793d RS |
10110 | |
10111 | case ZERO_EXTEND: | |
10112 | if ((unsigned_comparison_p || equality_comparison_p) | |
10113 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
5f4f0e22 CH |
10114 | <= HOST_BITS_PER_WIDE_INT) |
10115 | && ((unsigned HOST_WIDE_INT) const_op | |
230d793d RS |
10116 | < GET_MODE_MASK (GET_MODE (XEXP (op0, 0))))) |
10117 | { | |
10118 | op0 = XEXP (op0, 0); | |
10119 | continue; | |
10120 | } | |
10121 | break; | |
10122 | ||
10123 | case PLUS: | |
20fdd649 | 10124 | /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do |
5089e22e | 10125 | this for equality comparisons due to pathological cases involving |
230d793d | 10126 | overflows. */ |
20fdd649 RK |
10127 | if (equality_comparison_p |
10128 | && 0 != (tem = simplify_binary_operation (MINUS, mode, | |
10129 | op1, XEXP (op0, 1)))) | |
230d793d RS |
10130 | { |
10131 | op0 = XEXP (op0, 0); | |
10132 | op1 = tem; | |
10133 | continue; | |
10134 | } | |
10135 | ||
10136 | /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */ | |
10137 | if (const_op == 0 && XEXP (op0, 1) == constm1_rtx | |
10138 | && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p) | |
10139 | { | |
10140 | op0 = XEXP (XEXP (op0, 0), 0); | |
10141 | code = (code == LT ? EQ : NE); | |
10142 | continue; | |
10143 | } | |
10144 | break; | |
10145 | ||
10146 | case MINUS: | |
65945ec1 HPN |
10147 | /* We used to optimize signed comparisons against zero, but that |
10148 | was incorrect. Unsigned comparisons against zero (GTU, LEU) | |
10149 | arrive here as equality comparisons, or (GEU, LTU) are | |
10150 | optimized away. No need to special-case them. */ | |
0bd4b461 | 10151 | |
20fdd649 RK |
10152 | /* (eq (minus A B) C) -> (eq A (plus B C)) or |
10153 | (eq B (minus A C)), whichever simplifies. We can only do | |
10154 | this for equality comparisons due to pathological cases involving | |
10155 | overflows. */ | |
10156 | if (equality_comparison_p | |
10157 | && 0 != (tem = simplify_binary_operation (PLUS, mode, | |
10158 | XEXP (op0, 1), op1))) | |
10159 | { | |
10160 | op0 = XEXP (op0, 0); | |
10161 | op1 = tem; | |
10162 | continue; | |
10163 | } | |
10164 | ||
10165 | if (equality_comparison_p | |
10166 | && 0 != (tem = simplify_binary_operation (MINUS, mode, | |
10167 | XEXP (op0, 0), op1))) | |
10168 | { | |
10169 | op0 = XEXP (op0, 1); | |
10170 | op1 = tem; | |
10171 | continue; | |
10172 | } | |
10173 | ||
230d793d RS |
10174 | /* The sign bit of (minus (ashiftrt X C) X), where C is the number |
10175 | of bits in X minus 1, is one iff X > 0. */ | |
10176 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT | |
10177 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
26c34780 RS |
10178 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1)) |
10179 | == mode_width - 1 | |
230d793d RS |
10180 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) |
10181 | { | |
10182 | op0 = XEXP (op0, 1); | |
10183 | code = (code == GE ? LE : GT); | |
10184 | continue; | |
10185 | } | |
10186 | break; | |
10187 | ||
10188 | case XOR: | |
10189 | /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification | |
10190 | if C is zero or B is a constant. */ | |
10191 | if (equality_comparison_p | |
10192 | && 0 != (tem = simplify_binary_operation (XOR, mode, | |
10193 | XEXP (op0, 1), op1))) | |
10194 | { | |
10195 | op0 = XEXP (op0, 0); | |
10196 | op1 = tem; | |
10197 | continue; | |
10198 | } | |
10199 | break; | |
10200 | ||
10201 | case EQ: case NE: | |
69bc0a1f JH |
10202 | case UNEQ: case LTGT: |
10203 | case LT: case LTU: case UNLT: case LE: case LEU: case UNLE: | |
10204 | case GT: case GTU: case UNGT: case GE: case GEU: case UNGE: | |
10205 | case UNORDERED: case ORDERED: | |
230d793d RS |
10206 | /* We can't do anything if OP0 is a condition code value, rather |
10207 | than an actual data value. */ | |
10208 | if (const_op != 0 | |
8beccec8 | 10209 | || CC0_P (XEXP (op0, 0)) |
230d793d RS |
10210 | || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC) |
10211 | break; | |
10212 | ||
10213 | /* Get the two operands being compared. */ | |
10214 | if (GET_CODE (XEXP (op0, 0)) == COMPARE) | |
10215 | tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1); | |
10216 | else | |
10217 | tem = XEXP (op0, 0), tem1 = XEXP (op0, 1); | |
10218 | ||
10219 | /* Check for the cases where we simply want the result of the | |
10220 | earlier test or the opposite of that result. */ | |
9a915772 | 10221 | if (code == NE || code == EQ |
5f4f0e22 | 10222 | || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT |
3f508eca | 10223 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT |
230d793d | 10224 | && (STORE_FLAG_VALUE |
5f4f0e22 CH |
10225 | & (((HOST_WIDE_INT) 1 |
10226 | << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1)))) | |
aa6683f7 | 10227 | && (code == LT || code == GE))) |
230d793d | 10228 | { |
aa6683f7 GK |
10229 | enum rtx_code new_code; |
10230 | if (code == LT || code == NE) | |
10231 | new_code = GET_CODE (op0); | |
10232 | else | |
10233 | new_code = combine_reversed_comparison_code (op0); | |
23190837 | 10234 | |
aa6683f7 | 10235 | if (new_code != UNKNOWN) |
9a915772 | 10236 | { |
aa6683f7 GK |
10237 | code = new_code; |
10238 | op0 = tem; | |
10239 | op1 = tem1; | |
9a915772 JH |
10240 | continue; |
10241 | } | |
230d793d RS |
10242 | } |
10243 | break; | |
10244 | ||
10245 | case IOR: | |
da7d8304 | 10246 | /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero |
230d793d RS |
10247 | iff X <= 0. */ |
10248 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS | |
10249 | && XEXP (XEXP (op0, 0), 1) == constm1_rtx | |
10250 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) | |
10251 | { | |
10252 | op0 = XEXP (op0, 1); | |
10253 | code = (code == GE ? GT : LE); | |
10254 | continue; | |
10255 | } | |
10256 | break; | |
10257 | ||
10258 | case AND: | |
10259 | /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This | |
10260 | will be converted to a ZERO_EXTRACT later. */ | |
10261 | if (const_op == 0 && equality_comparison_p | |
45620ed4 | 10262 | && GET_CODE (XEXP (op0, 0)) == ASHIFT |
230d793d RS |
10263 | && XEXP (XEXP (op0, 0), 0) == const1_rtx) |
10264 | { | |
10265 | op0 = simplify_and_const_int | |
f1c6ba8b RK |
10266 | (op0, mode, gen_rtx_LSHIFTRT (mode, |
10267 | XEXP (op0, 1), | |
10268 | XEXP (XEXP (op0, 0), 1)), | |
5f4f0e22 | 10269 | (HOST_WIDE_INT) 1); |
230d793d RS |
10270 | continue; |
10271 | } | |
10272 | ||
10273 | /* If we are comparing (and (lshiftrt X C1) C2) for equality with | |
10274 | zero and X is a comparison and C1 and C2 describe only bits set | |
10275 | in STORE_FLAG_VALUE, we can compare with X. */ | |
10276 | if (const_op == 0 && equality_comparison_p | |
5f4f0e22 | 10277 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d RS |
10278 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
10279 | && GET_CODE (XEXP (op0, 0)) == LSHIFTRT | |
10280 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
10281 | && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0 | |
5f4f0e22 | 10282 | && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT) |
230d793d RS |
10283 | { |
10284 | mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) | |
10285 | << INTVAL (XEXP (XEXP (op0, 0), 1))); | |
663522cb | 10286 | if ((~STORE_FLAG_VALUE & mask) == 0 |
ec8e098d | 10287 | && (COMPARISON_P (XEXP (XEXP (op0, 0), 0)) |
230d793d | 10288 | || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0 |
ec8e098d | 10289 | && COMPARISON_P (tem)))) |
230d793d RS |
10290 | { |
10291 | op0 = XEXP (XEXP (op0, 0), 0); | |
10292 | continue; | |
10293 | } | |
10294 | } | |
10295 | ||
10296 | /* If we are doing an equality comparison of an AND of a bit equal | |
10297 | to the sign bit, replace this with a LT or GE comparison of | |
10298 | the underlying value. */ | |
10299 | if (equality_comparison_p | |
10300 | && const_op == 0 | |
10301 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5f4f0e22 | 10302 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d | 10303 | && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) |
e51712db | 10304 | == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) |
230d793d RS |
10305 | { |
10306 | op0 = XEXP (op0, 0); | |
10307 | code = (code == EQ ? GE : LT); | |
10308 | continue; | |
10309 | } | |
10310 | ||
10311 | /* If this AND operation is really a ZERO_EXTEND from a narrower | |
10312 | mode, the constant fits within that mode, and this is either an | |
10313 | equality or unsigned comparison, try to do this comparison in | |
10314 | the narrower mode. */ | |
10315 | if ((equality_comparison_p || unsigned_comparison_p) | |
10316 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10317 | && (i = exact_log2 ((INTVAL (XEXP (op0, 1)) | |
10318 | & GET_MODE_MASK (mode)) | |
10319 | + 1)) >= 0 | |
10320 | && const_op >> i == 0 | |
10321 | && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode) | |
10322 | { | |
4de249d9 | 10323 | op0 = gen_lowpart (tmode, XEXP (op0, 0)); |
230d793d RS |
10324 | continue; |
10325 | } | |
e5e809f4 | 10326 | |
70e1b8fc AM |
10327 | /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 |
10328 | fits in both M1 and M2 and the SUBREG is either paradoxical | |
10329 | or represents the low part, permute the SUBREG and the AND | |
10330 | and try again. */ | |
10331 | if (GET_CODE (XEXP (op0, 0)) == SUBREG) | |
10332 | { | |
10333 | unsigned HOST_WIDE_INT c1; | |
10334 | tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0))); | |
678e68fc JW |
10335 | /* Require an integral mode, to avoid creating something like |
10336 | (AND:SF ...). */ | |
70e1b8fc AM |
10337 | if (SCALAR_INT_MODE_P (tmode) |
10338 | /* It is unsafe to commute the AND into the SUBREG if the | |
10339 | SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is | |
10340 | not defined. As originally written the upper bits | |
10341 | have a defined value due to the AND operation. | |
10342 | However, if we commute the AND inside the SUBREG then | |
10343 | they no longer have defined values and the meaning of | |
10344 | the code has been changed. */ | |
10345 | && (0 | |
9ec36da5 | 10346 | #ifdef WORD_REGISTER_OPERATIONS |
70e1b8fc AM |
10347 | || (mode_width > GET_MODE_BITSIZE (tmode) |
10348 | && mode_width <= BITS_PER_WORD) | |
9ec36da5 | 10349 | #endif |
70e1b8fc AM |
10350 | || (mode_width <= GET_MODE_BITSIZE (tmode) |
10351 | && subreg_lowpart_p (XEXP (op0, 0)))) | |
10352 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10353 | && mode_width <= HOST_BITS_PER_WIDE_INT | |
10354 | && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT | |
10355 | && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0 | |
10356 | && (c1 & ~GET_MODE_MASK (tmode)) == 0 | |
10357 | && c1 != mask | |
10358 | && c1 != GET_MODE_MASK (tmode)) | |
10359 | { | |
1999435c PB |
10360 | op0 = gen_binary (AND, tmode, |
10361 | SUBREG_REG (XEXP (op0, 0)), | |
10362 | gen_int_mode (c1, tmode)); | |
4de249d9 | 10363 | op0 = gen_lowpart (mode, op0); |
70e1b8fc AM |
10364 | continue; |
10365 | } | |
e5e809f4 JL |
10366 | } |
10367 | ||
34ed3bb0 KH |
10368 | /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */ |
10369 | if (const_op == 0 && equality_comparison_p | |
10370 | && XEXP (op0, 1) == const1_rtx | |
10371 | && GET_CODE (XEXP (op0, 0)) == NOT) | |
10372 | { | |
10373 | op0 = simplify_and_const_int | |
e5686da7 | 10374 | (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1); |
34ed3bb0 KH |
10375 | code = (code == NE ? EQ : NE); |
10376 | continue; | |
10377 | } | |
10378 | ||
9f8e169e | 10379 | /* Convert (ne (and (lshiftrt (not X)) 1) 0) to |
5565e874 KH |
10380 | (eq (and (lshiftrt X) 1) 0). |
10381 | Also handle the case where (not X) is expressed using xor. */ | |
9f8e169e RH |
10382 | if (const_op == 0 && equality_comparison_p |
10383 | && XEXP (op0, 1) == const1_rtx | |
5565e874 | 10384 | && GET_CODE (XEXP (op0, 0)) == LSHIFTRT) |
9f8e169e | 10385 | { |
5565e874 KH |
10386 | rtx shift_op = XEXP (XEXP (op0, 0), 0); |
10387 | rtx shift_count = XEXP (XEXP (op0, 0), 1); | |
10388 | ||
10389 | if (GET_CODE (shift_op) == NOT | |
10390 | || (GET_CODE (shift_op) == XOR | |
10391 | && GET_CODE (XEXP (shift_op, 1)) == CONST_INT | |
10392 | && GET_CODE (shift_count) == CONST_INT | |
10393 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
10394 | && (INTVAL (XEXP (shift_op, 1)) | |
10395 | == (HOST_WIDE_INT) 1 << INTVAL (shift_count)))) | |
10396 | { | |
10397 | op0 = simplify_and_const_int | |
10398 | (NULL_RTX, mode, | |
10399 | gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count), | |
10400 | (HOST_WIDE_INT) 1); | |
10401 | code = (code == NE ? EQ : NE); | |
10402 | continue; | |
10403 | } | |
9f8e169e | 10404 | } |
230d793d RS |
10405 | break; |
10406 | ||
10407 | case ASHIFT: | |
45620ed4 | 10408 | /* If we have (compare (ashift FOO N) (const_int C)) and |
230d793d | 10409 | the high order N bits of FOO (N+1 if an inequality comparison) |
951553af | 10410 | are known to be zero, we can do this by comparing FOO with C |
230d793d RS |
10411 | shifted right N bits so long as the low-order N bits of C are |
10412 | zero. */ | |
10413 | if (GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10414 | && INTVAL (XEXP (op0, 1)) >= 0 | |
10415 | && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p) | |
5f4f0e22 CH |
10416 | < HOST_BITS_PER_WIDE_INT) |
10417 | && ((const_op | |
34785d05 | 10418 | & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0) |
5f4f0e22 | 10419 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 10420 | && (nonzero_bits (XEXP (op0, 0), mode) |
663522cb KH |
10421 | & ~(mask >> (INTVAL (XEXP (op0, 1)) |
10422 | + ! equality_comparison_p))) == 0) | |
230d793d | 10423 | { |
7ce787fe NC |
10424 | /* We must perform a logical shift, not an arithmetic one, |
10425 | as we want the top N bits of C to be zero. */ | |
aaaec114 | 10426 | unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode); |
663522cb | 10427 | |
7ce787fe | 10428 | temp >>= INTVAL (XEXP (op0, 1)); |
2496c7bd | 10429 | op1 = gen_int_mode (temp, mode); |
230d793d RS |
10430 | op0 = XEXP (op0, 0); |
10431 | continue; | |
10432 | } | |
10433 | ||
dfbe1b2f | 10434 | /* If we are doing a sign bit comparison, it means we are testing |
230d793d | 10435 | a particular bit. Convert it to the appropriate AND. */ |
dfbe1b2f | 10436 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT |
5f4f0e22 | 10437 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
230d793d | 10438 | { |
5f4f0e22 CH |
10439 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10440 | ((HOST_WIDE_INT) 1 | |
10441 | << (mode_width - 1 | |
10442 | - INTVAL (XEXP (op0, 1))))); | |
230d793d RS |
10443 | code = (code == LT ? NE : EQ); |
10444 | continue; | |
10445 | } | |
dfbe1b2f RK |
10446 | |
10447 | /* If this an equality comparison with zero and we are shifting | |
10448 | the low bit to the sign bit, we can convert this to an AND of the | |
10449 | low-order bit. */ | |
10450 | if (const_op == 0 && equality_comparison_p | |
10451 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
26c34780 RS |
10452 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) |
10453 | == mode_width - 1) | |
dfbe1b2f | 10454 | { |
5f4f0e22 CH |
10455 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10456 | (HOST_WIDE_INT) 1); | |
dfbe1b2f RK |
10457 | continue; |
10458 | } | |
230d793d RS |
10459 | break; |
10460 | ||
10461 | case ASHIFTRT: | |
d0ab8cd3 RK |
10462 | /* If this is an equality comparison with zero, we can do this |
10463 | as a logical shift, which might be much simpler. */ | |
10464 | if (equality_comparison_p && const_op == 0 | |
10465 | && GET_CODE (XEXP (op0, 1)) == CONST_INT) | |
10466 | { | |
10467 | op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, | |
10468 | XEXP (op0, 0), | |
10469 | INTVAL (XEXP (op0, 1))); | |
10470 | continue; | |
10471 | } | |
10472 | ||
230d793d RS |
10473 | /* If OP0 is a sign extension and CODE is not an unsigned comparison, |
10474 | do the comparison in a narrower mode. */ | |
10475 | if (! unsigned_comparison_p | |
10476 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10477 | && GET_CODE (XEXP (op0, 0)) == ASHIFT | |
10478 | && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) | |
10479 | && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), | |
22331794 | 10480 | MODE_INT, 1)) != BLKmode |
67e469d7 AM |
10481 | && (((unsigned HOST_WIDE_INT) const_op |
10482 | + (GET_MODE_MASK (tmode) >> 1) + 1) | |
10483 | <= GET_MODE_MASK (tmode))) | |
230d793d | 10484 | { |
4de249d9 | 10485 | op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0)); |
230d793d RS |
10486 | continue; |
10487 | } | |
10488 | ||
14a774a9 RK |
10489 | /* Likewise if OP0 is a PLUS of a sign extension with a |
10490 | constant, which is usually represented with the PLUS | |
10491 | between the shifts. */ | |
10492 | if (! unsigned_comparison_p | |
10493 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10494 | && GET_CODE (XEXP (op0, 0)) == PLUS | |
10495 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
10496 | && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT | |
10497 | && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1) | |
10498 | && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), | |
10499 | MODE_INT, 1)) != BLKmode | |
67e469d7 AM |
10500 | && (((unsigned HOST_WIDE_INT) const_op |
10501 | + (GET_MODE_MASK (tmode) >> 1) + 1) | |
10502 | <= GET_MODE_MASK (tmode))) | |
14a774a9 RK |
10503 | { |
10504 | rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0); | |
10505 | rtx add_const = XEXP (XEXP (op0, 0), 1); | |
1999435c PB |
10506 | rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const, |
10507 | XEXP (op0, 1)); | |
14a774a9 | 10508 | |
1999435c PB |
10509 | op0 = gen_binary (PLUS, tmode, |
10510 | gen_lowpart (tmode, inner), | |
10511 | new_const); | |
14a774a9 RK |
10512 | continue; |
10513 | } | |
10514 | ||
0f41302f | 10515 | /* ... fall through ... */ |
230d793d RS |
10516 | case LSHIFTRT: |
10517 | /* If we have (compare (xshiftrt FOO N) (const_int C)) and | |
951553af | 10518 | the low order N bits of FOO are known to be zero, we can do this |
230d793d RS |
10519 | by comparing FOO with C shifted left N bits so long as no |
10520 | overflow occurs. */ | |
10521 | if (GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10522 | && INTVAL (XEXP (op0, 1)) >= 0 | |
5f4f0e22 CH |
10523 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT |
10524 | && mode_width <= HOST_BITS_PER_WIDE_INT | |
951553af | 10525 | && (nonzero_bits (XEXP (op0, 0), mode) |
5f4f0e22 | 10526 | & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0 |
67e469d7 AM |
10527 | && (((unsigned HOST_WIDE_INT) const_op |
10528 | + (GET_CODE (op0) != LSHIFTRT | |
10529 | ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1) | |
10530 | + 1) | |
10531 | : 0)) | |
10532 | <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)))) | |
230d793d | 10533 | { |
145d3bf2 RE |
10534 | /* If the shift was logical, then we must make the condition |
10535 | unsigned. */ | |
10536 | if (GET_CODE (op0) == LSHIFTRT) | |
10537 | code = unsigned_condition (code); | |
10538 | ||
230d793d | 10539 | const_op <<= INTVAL (XEXP (op0, 1)); |
5f4f0e22 | 10540 | op1 = GEN_INT (const_op); |
230d793d RS |
10541 | op0 = XEXP (op0, 0); |
10542 | continue; | |
10543 | } | |
10544 | ||
10545 | /* If we are using this shift to extract just the sign bit, we | |
10546 | can replace this with an LT or GE comparison. */ | |
10547 | if (const_op == 0 | |
10548 | && (equality_comparison_p || sign_bit_comparison_p) | |
10549 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
26c34780 RS |
10550 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) |
10551 | == mode_width - 1) | |
230d793d RS |
10552 | { |
10553 | op0 = XEXP (op0, 0); | |
10554 | code = (code == NE || code == GT ? LT : GE); | |
10555 | continue; | |
10556 | } | |
10557 | break; | |
663522cb | 10558 | |
e9a25f70 JL |
10559 | default: |
10560 | break; | |
230d793d RS |
10561 | } |
10562 | ||
10563 | break; | |
10564 | } | |
10565 | ||
10566 | /* Now make any compound operations involved in this comparison. Then, | |
76d31c63 | 10567 | check for an outmost SUBREG on OP0 that is not doing anything or is |
5add6d1a JL |
10568 | paradoxical. The latter transformation must only be performed when |
10569 | it is known that the "extra" bits will be the same in op0 and op1 or | |
10570 | that they don't matter. There are three cases to consider: | |
10571 | ||
10572 | 1. SUBREG_REG (op0) is a register. In this case the bits are don't | |
10573 | care bits and we can assume they have any convenient value. So | |
10574 | making the transformation is safe. | |
10575 | ||
10576 | 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined. | |
10577 | In this case the upper bits of op0 are undefined. We should not make | |
10578 | the simplification in that case as we do not know the contents of | |
10579 | those bits. | |
10580 | ||
10581 | 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not | |
f822d252 | 10582 | UNKNOWN. In that case we know those bits are zeros or ones. We must |
5add6d1a JL |
10583 | also be sure that they are the same as the upper bits of op1. |
10584 | ||
10585 | We can never remove a SUBREG for a non-equality comparison because | |
10586 | the sign bit is in a different place in the underlying object. */ | |
230d793d RS |
10587 | |
10588 | op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET); | |
10589 | op1 = make_compound_operation (op1, SET); | |
10590 | ||
10591 | if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0) | |
10592 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT | |
fa4e13e0 | 10593 | && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT |
5add6d1a | 10594 | && (code == NE || code == EQ)) |
230d793d | 10595 | { |
5add6d1a JL |
10596 | if (GET_MODE_SIZE (GET_MODE (op0)) |
10597 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))) | |
10598 | { | |
dc5c3188 UW |
10599 | /* For paradoxical subregs, allow case 1 as above. Case 3 isn't |
10600 | implemented. */ | |
f8cfc6aa | 10601 | if (REG_P (SUBREG_REG (op0))) |
dc5c3188 UW |
10602 | { |
10603 | op0 = SUBREG_REG (op0); | |
4de249d9 | 10604 | op1 = gen_lowpart (GET_MODE (op0), op1); |
dc5c3188 | 10605 | } |
5add6d1a JL |
10606 | } |
10607 | else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) | |
10608 | <= HOST_BITS_PER_WIDE_INT) | |
10609 | && (nonzero_bits (SUBREG_REG (op0), | |
10610 | GET_MODE (SUBREG_REG (op0))) | |
10611 | & ~GET_MODE_MASK (GET_MODE (op0))) == 0) | |
10612 | { | |
4de249d9 | 10613 | tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1); |
230d793d | 10614 | |
5add6d1a JL |
10615 | if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0))) |
10616 | & ~GET_MODE_MASK (GET_MODE (op0))) == 0) | |
10617 | op0 = SUBREG_REG (op0), op1 = tem; | |
10618 | } | |
10619 | } | |
230d793d RS |
10620 | |
10621 | /* We now do the opposite procedure: Some machines don't have compare | |
10622 | insns in all modes. If OP0's mode is an integer mode smaller than a | |
10623 | word and we can't do a compare in that mode, see if there is a larger | |
a687e897 RK |
10624 | mode for which we can do the compare. There are a number of cases in |
10625 | which we can use the wider mode. */ | |
230d793d RS |
10626 | |
10627 | mode = GET_MODE (op0); | |
10628 | if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT | |
10629 | && GET_MODE_SIZE (mode) < UNITS_PER_WORD | |
ef89d648 | 10630 | && ! have_insn_for (COMPARE, mode)) |
230d793d | 10631 | for (tmode = GET_MODE_WIDER_MODE (mode); |
5f4f0e22 CH |
10632 | (tmode != VOIDmode |
10633 | && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT); | |
230d793d | 10634 | tmode = GET_MODE_WIDER_MODE (tmode)) |
ef89d648 | 10635 | if (have_insn_for (COMPARE, tmode)) |
230d793d | 10636 | { |
d4c5ac1f AM |
10637 | int zero_extended; |
10638 | ||
951553af | 10639 | /* If the only nonzero bits in OP0 and OP1 are those in the |
a687e897 RK |
10640 | narrower mode and this is an equality or unsigned comparison, |
10641 | we can use the wider mode. Similarly for sign-extended | |
7e4dc511 | 10642 | values, in which case it is true for all comparisons. */ |
d4c5ac1f AM |
10643 | zero_extended = ((code == EQ || code == NE |
10644 | || code == GEU || code == GTU | |
10645 | || code == LEU || code == LTU) | |
10646 | && (nonzero_bits (op0, tmode) | |
10647 | & ~GET_MODE_MASK (mode)) == 0 | |
10648 | && ((GET_CODE (op1) == CONST_INT | |
10649 | || (nonzero_bits (op1, tmode) | |
10650 | & ~GET_MODE_MASK (mode)) == 0))); | |
10651 | ||
10652 | if (zero_extended | |
7e4dc511 | 10653 | || ((num_sign_bit_copies (op0, tmode) |
26c34780 RS |
10654 | > (unsigned int) (GET_MODE_BITSIZE (tmode) |
10655 | - GET_MODE_BITSIZE (mode))) | |
a687e897 | 10656 | && (num_sign_bit_copies (op1, tmode) |
26c34780 RS |
10657 | > (unsigned int) (GET_MODE_BITSIZE (tmode) |
10658 | - GET_MODE_BITSIZE (mode))))) | |
a687e897 | 10659 | { |
14a774a9 RK |
10660 | /* If OP0 is an AND and we don't have an AND in MODE either, |
10661 | make a new AND in the proper mode. */ | |
10662 | if (GET_CODE (op0) == AND | |
ef89d648 | 10663 | && !have_insn_for (AND, mode)) |
1999435c PB |
10664 | op0 = gen_binary (AND, tmode, |
10665 | gen_lowpart (tmode, | |
10666 | XEXP (op0, 0)), | |
10667 | gen_lowpart (tmode, | |
10668 | XEXP (op0, 1))); | |
14a774a9 | 10669 | |
4de249d9 | 10670 | op0 = gen_lowpart (tmode, op0); |
d4c5ac1f AM |
10671 | if (zero_extended && GET_CODE (op1) == CONST_INT) |
10672 | op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode)); | |
4de249d9 | 10673 | op1 = gen_lowpart (tmode, op1); |
a687e897 RK |
10674 | break; |
10675 | } | |
230d793d | 10676 | |
a687e897 RK |
10677 | /* If this is a test for negative, we can make an explicit |
10678 | test of the sign bit. */ | |
10679 | ||
10680 | if (op1 == const0_rtx && (code == LT || code == GE) | |
10681 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
230d793d | 10682 | { |
1999435c PB |
10683 | op0 = gen_binary (AND, tmode, |
10684 | gen_lowpart (tmode, op0), | |
10685 | GEN_INT ((HOST_WIDE_INT) 1 | |
10686 | << (GET_MODE_BITSIZE (mode) - 1))); | |
230d793d | 10687 | code = (code == LT) ? NE : EQ; |
a687e897 | 10688 | break; |
230d793d | 10689 | } |
230d793d RS |
10690 | } |
10691 | ||
b7a775b2 RK |
10692 | #ifdef CANONICALIZE_COMPARISON |
10693 | /* If this machine only supports a subset of valid comparisons, see if we | |
10694 | can convert an unsupported one into a supported one. */ | |
10695 | CANONICALIZE_COMPARISON (code, op0, op1); | |
10696 | #endif | |
10697 | ||
230d793d RS |
10698 | *pop0 = op0; |
10699 | *pop1 = op1; | |
10700 | ||
10701 | return code; | |
10702 | } | |
10703 | \f | |
9a915772 JH |
10704 | /* Like jump.c' reversed_comparison_code, but use combine infrastructure for |
10705 | searching backward. */ | |
c3ffea50 | 10706 | static enum rtx_code |
79a490a9 | 10707 | combine_reversed_comparison_code (rtx exp) |
230d793d | 10708 | { |
cf0d9408 KH |
10709 | enum rtx_code code1 = reversed_comparison_code (exp, NULL); |
10710 | rtx x; | |
10711 | ||
10712 | if (code1 != UNKNOWN | |
10713 | || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC) | |
10714 | return code1; | |
10715 | /* Otherwise try and find where the condition codes were last set and | |
10716 | use that. */ | |
10717 | x = get_last_value (XEXP (exp, 0)); | |
10718 | if (!x || GET_CODE (x) != COMPARE) | |
10719 | return UNKNOWN; | |
10720 | return reversed_comparison_code_parts (GET_CODE (exp), | |
10721 | XEXP (x, 0), XEXP (x, 1), NULL); | |
9a915772 | 10722 | } |
e869aa39 | 10723 | |
9a915772 JH |
10724 | /* Return comparison with reversed code of EXP and operands OP0 and OP1. |
10725 | Return NULL_RTX in case we fail to do the reversal. */ | |
10726 | static rtx | |
79a490a9 | 10727 | reversed_comparison (rtx exp, enum machine_mode mode, rtx op0, rtx op1) |
9a915772 JH |
10728 | { |
10729 | enum rtx_code reversed_code = combine_reversed_comparison_code (exp); | |
10730 | if (reversed_code == UNKNOWN) | |
10731 | return NULL_RTX; | |
10732 | else | |
1999435c | 10733 | return gen_binary (reversed_code, mode, op0, op1); |
230d793d RS |
10734 | } |
10735 | \f | |
10736 | /* Utility function for following routine. Called when X is part of a value | |
5eaad481 | 10737 | being stored into last_set_value. Sets last_set_table_tick |
230d793d RS |
10738 | for each register mentioned. Similar to mention_regs in cse.c */ |
10739 | ||
10740 | static void | |
79a490a9 | 10741 | update_table_tick (rtx x) |
230d793d | 10742 | { |
b3694847 SS |
10743 | enum rtx_code code = GET_CODE (x); |
10744 | const char *fmt = GET_RTX_FORMAT (code); | |
10745 | int i; | |
230d793d RS |
10746 | |
10747 | if (code == REG) | |
10748 | { | |
770ae6cc RK |
10749 | unsigned int regno = REGNO (x); |
10750 | unsigned int endregno | |
10751 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
66fd46b6 | 10752 | ? hard_regno_nregs[regno][GET_MODE (x)] : 1); |
770ae6cc | 10753 | unsigned int r; |
230d793d | 10754 | |
770ae6cc | 10755 | for (r = regno; r < endregno; r++) |
5eaad481 | 10756 | reg_stat[r].last_set_table_tick = label_tick; |
230d793d RS |
10757 | |
10758 | return; | |
10759 | } | |
663522cb | 10760 | |
230d793d RS |
10761 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
10762 | /* Note that we can't have an "E" in values stored; see | |
10763 | get_last_value_validate. */ | |
10764 | if (fmt[i] == 'e') | |
8fd73754 AN |
10765 | { |
10766 | /* Check for identical subexpressions. If x contains | |
10767 | identical subexpression we only have to traverse one of | |
10768 | them. */ | |
ec8e098d | 10769 | if (i == 0 && ARITHMETIC_P (x)) |
8fd73754 AN |
10770 | { |
10771 | /* Note that at this point x1 has already been | |
10772 | processed. */ | |
10773 | rtx x0 = XEXP (x, 0); | |
10774 | rtx x1 = XEXP (x, 1); | |
10775 | ||
10776 | /* If x0 and x1 are identical then there is no need to | |
10777 | process x0. */ | |
10778 | if (x0 == x1) | |
10779 | break; | |
10780 | ||
10781 | /* If x0 is identical to a subexpression of x1 then while | |
10782 | processing x1, x0 has already been processed. Thus we | |
10783 | are done with x. */ | |
ec8e098d | 10784 | if (ARITHMETIC_P (x1) |
8fd73754 AN |
10785 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) |
10786 | break; | |
10787 | ||
10788 | /* If x1 is identical to a subexpression of x0 then we | |
10789 | still have to process the rest of x0. */ | |
ec8e098d | 10790 | if (ARITHMETIC_P (x0) |
8fd73754 AN |
10791 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) |
10792 | { | |
10793 | update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0)); | |
10794 | break; | |
10795 | } | |
10796 | } | |
73a39fc4 | 10797 | |
8fd73754 AN |
10798 | update_table_tick (XEXP (x, i)); |
10799 | } | |
230d793d RS |
10800 | } |
10801 | ||
10802 | /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we | |
10803 | are saying that the register is clobbered and we no longer know its | |
5eaad481 PB |
10804 | value. If INSN is zero, don't update reg_stat[].last_set; this is |
10805 | only permitted with VALUE also zero and is used to invalidate the | |
10806 | register. */ | |
230d793d RS |
10807 | |
10808 | static void | |
79a490a9 | 10809 | record_value_for_reg (rtx reg, rtx insn, rtx value) |
230d793d | 10810 | { |
770ae6cc RK |
10811 | unsigned int regno = REGNO (reg); |
10812 | unsigned int endregno | |
10813 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
66fd46b6 | 10814 | ? hard_regno_nregs[regno][GET_MODE (reg)] : 1); |
770ae6cc | 10815 | unsigned int i; |
230d793d RS |
10816 | |
10817 | /* If VALUE contains REG and we have a previous value for REG, substitute | |
10818 | the previous value. */ | |
10819 | if (value && insn && reg_overlap_mentioned_p (reg, value)) | |
10820 | { | |
10821 | rtx tem; | |
10822 | ||
10823 | /* Set things up so get_last_value is allowed to see anything set up to | |
10824 | our insn. */ | |
10825 | subst_low_cuid = INSN_CUID (insn); | |
663522cb | 10826 | tem = get_last_value (reg); |
230d793d | 10827 | |
14a774a9 RK |
10828 | /* If TEM is simply a binary operation with two CLOBBERs as operands, |
10829 | it isn't going to be useful and will take a lot of time to process, | |
10830 | so just use the CLOBBER. */ | |
10831 | ||
230d793d | 10832 | if (tem) |
14a774a9 | 10833 | { |
ec8e098d | 10834 | if (ARITHMETIC_P (tem) |
14a774a9 RK |
10835 | && GET_CODE (XEXP (tem, 0)) == CLOBBER |
10836 | && GET_CODE (XEXP (tem, 1)) == CLOBBER) | |
10837 | tem = XEXP (tem, 0); | |
10838 | ||
10839 | value = replace_rtx (copy_rtx (value), reg, tem); | |
10840 | } | |
230d793d RS |
10841 | } |
10842 | ||
10843 | /* For each register modified, show we don't know its value, that | |
ef026f91 RS |
10844 | we don't know about its bitwise content, that its value has been |
10845 | updated, and that we don't know the location of the death of the | |
10846 | register. */ | |
770ae6cc | 10847 | for (i = regno; i < endregno; i++) |
230d793d RS |
10848 | { |
10849 | if (insn) | |
5eaad481 | 10850 | reg_stat[i].last_set = insn; |
770ae6cc | 10851 | |
5eaad481 PB |
10852 | reg_stat[i].last_set_value = 0; |
10853 | reg_stat[i].last_set_mode = 0; | |
10854 | reg_stat[i].last_set_nonzero_bits = 0; | |
10855 | reg_stat[i].last_set_sign_bit_copies = 0; | |
10856 | reg_stat[i].last_death = 0; | |
230d793d RS |
10857 | } |
10858 | ||
10859 | /* Mark registers that are being referenced in this value. */ | |
10860 | if (value) | |
10861 | update_table_tick (value); | |
10862 | ||
10863 | /* Now update the status of each register being set. | |
10864 | If someone is using this register in this block, set this register | |
10865 | to invalid since we will get confused between the two lives in this | |
10866 | basic block. This makes using this register always invalid. In cse, we | |
10867 | scan the table to invalidate all entries using this register, but this | |
10868 | is too much work for us. */ | |
10869 | ||
10870 | for (i = regno; i < endregno; i++) | |
10871 | { | |
5eaad481 PB |
10872 | reg_stat[i].last_set_label = label_tick; |
10873 | if (value && reg_stat[i].last_set_table_tick == label_tick) | |
10874 | reg_stat[i].last_set_invalid = 1; | |
230d793d | 10875 | else |
5eaad481 | 10876 | reg_stat[i].last_set_invalid = 0; |
230d793d RS |
10877 | } |
10878 | ||
10879 | /* The value being assigned might refer to X (like in "x++;"). In that | |
10880 | case, we must replace it with (clobber (const_int 0)) to prevent | |
10881 | infinite loops. */ | |
9a893315 | 10882 | if (value && ! get_last_value_validate (&value, insn, |
5eaad481 | 10883 | reg_stat[regno].last_set_label, 0)) |
230d793d RS |
10884 | { |
10885 | value = copy_rtx (value); | |
9a893315 | 10886 | if (! get_last_value_validate (&value, insn, |
5eaad481 | 10887 | reg_stat[regno].last_set_label, 1)) |
230d793d RS |
10888 | value = 0; |
10889 | } | |
10890 | ||
55310dad RK |
10891 | /* For the main register being modified, update the value, the mode, the |
10892 | nonzero bits, and the number of sign bit copies. */ | |
10893 | ||
5eaad481 | 10894 | reg_stat[regno].last_set_value = value; |
230d793d | 10895 | |
55310dad RK |
10896 | if (value) |
10897 | { | |
0a0440c9 | 10898 | enum machine_mode mode = GET_MODE (reg); |
2afabb48 | 10899 | subst_low_cuid = INSN_CUID (insn); |
5eaad481 | 10900 | reg_stat[regno].last_set_mode = mode; |
0a0440c9 JJ |
10901 | if (GET_MODE_CLASS (mode) == MODE_INT |
10902 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
10903 | mode = nonzero_bits_mode; | |
5eaad481 PB |
10904 | reg_stat[regno].last_set_nonzero_bits = nonzero_bits (value, mode); |
10905 | reg_stat[regno].last_set_sign_bit_copies | |
55310dad RK |
10906 | = num_sign_bit_copies (value, GET_MODE (reg)); |
10907 | } | |
230d793d RS |
10908 | } |
10909 | ||
230d793d | 10910 | /* Called via note_stores from record_dead_and_set_regs to handle one |
84832317 MM |
10911 | SET or CLOBBER in an insn. DATA is the instruction in which the |
10912 | set is occurring. */ | |
230d793d RS |
10913 | |
10914 | static void | |
79a490a9 | 10915 | record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data) |
230d793d | 10916 | { |
84832317 MM |
10917 | rtx record_dead_insn = (rtx) data; |
10918 | ||
ca89d290 RK |
10919 | if (GET_CODE (dest) == SUBREG) |
10920 | dest = SUBREG_REG (dest); | |
10921 | ||
f8cfc6aa | 10922 | if (REG_P (dest)) |
230d793d RS |
10923 | { |
10924 | /* If we are setting the whole register, we know its value. Otherwise | |
10925 | show that we don't know the value. We can handle SUBREG in | |
10926 | some cases. */ | |
10927 | if (GET_CODE (setter) == SET && dest == SET_DEST (setter)) | |
10928 | record_value_for_reg (dest, record_dead_insn, SET_SRC (setter)); | |
10929 | else if (GET_CODE (setter) == SET | |
10930 | && GET_CODE (SET_DEST (setter)) == SUBREG | |
10931 | && SUBREG_REG (SET_DEST (setter)) == dest | |
90bf8081 | 10932 | && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD |
230d793d | 10933 | && subreg_lowpart_p (SET_DEST (setter))) |
d0ab8cd3 | 10934 | record_value_for_reg (dest, record_dead_insn, |
4de249d9 | 10935 | gen_lowpart (GET_MODE (dest), |
d0ab8cd3 | 10936 | SET_SRC (setter))); |
230d793d | 10937 | else |
5f4f0e22 | 10938 | record_value_for_reg (dest, record_dead_insn, NULL_RTX); |
230d793d | 10939 | } |
3c0cb5de | 10940 | else if (MEM_P (dest) |
230d793d RS |
10941 | /* Ignore pushes, they clobber nothing. */ |
10942 | && ! push_operand (dest, GET_MODE (dest))) | |
10943 | mem_last_set = INSN_CUID (record_dead_insn); | |
10944 | } | |
10945 | ||
10946 | /* Update the records of when each REG was most recently set or killed | |
10947 | for the things done by INSN. This is the last thing done in processing | |
10948 | INSN in the combiner loop. | |
10949 | ||
5eaad481 PB |
10950 | We update reg_stat[], in particular fields last_set, last_set_value, |
10951 | last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies, | |
10952 | last_death, and also the similar information mem_last_set (which insn | |
10953 | most recently modified memory) and last_call_cuid (which insn was the | |
10954 | most recent subroutine call). */ | |
230d793d RS |
10955 | |
10956 | static void | |
79a490a9 | 10957 | record_dead_and_set_regs (rtx insn) |
230d793d | 10958 | { |
b3694847 | 10959 | rtx link; |
770ae6cc | 10960 | unsigned int i; |
55310dad | 10961 | |
230d793d RS |
10962 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
10963 | { | |
dbc131f3 | 10964 | if (REG_NOTE_KIND (link) == REG_DEAD |
f8cfc6aa | 10965 | && REG_P (XEXP (link, 0))) |
dbc131f3 | 10966 | { |
770ae6cc RK |
10967 | unsigned int regno = REGNO (XEXP (link, 0)); |
10968 | unsigned int endregno | |
dbc131f3 | 10969 | = regno + (regno < FIRST_PSEUDO_REGISTER |
66fd46b6 | 10970 | ? hard_regno_nregs[regno][GET_MODE (XEXP (link, 0))] |
dbc131f3 | 10971 | : 1); |
dbc131f3 RK |
10972 | |
10973 | for (i = regno; i < endregno; i++) | |
5eaad481 | 10974 | reg_stat[i].last_death = insn; |
dbc131f3 | 10975 | } |
230d793d | 10976 | else if (REG_NOTE_KIND (link) == REG_INC) |
5f4f0e22 | 10977 | record_value_for_reg (XEXP (link, 0), insn, NULL_RTX); |
230d793d RS |
10978 | } |
10979 | ||
4b4bf941 | 10980 | if (CALL_P (insn)) |
55310dad RK |
10981 | { |
10982 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
29655d3d | 10983 | if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)) |
55310dad | 10984 | { |
5eaad481 PB |
10985 | reg_stat[i].last_set_value = 0; |
10986 | reg_stat[i].last_set_mode = 0; | |
10987 | reg_stat[i].last_set_nonzero_bits = 0; | |
10988 | reg_stat[i].last_set_sign_bit_copies = 0; | |
10989 | reg_stat[i].last_death = 0; | |
55310dad RK |
10990 | } |
10991 | ||
10992 | last_call_cuid = mem_last_set = INSN_CUID (insn); | |
29655d3d ZW |
10993 | |
10994 | /* Don't bother recording what this insn does. It might set the | |
10995 | return value register, but we can't combine into a call | |
10996 | pattern anyway, so there's no point trying (and it may cause | |
10997 | a crash, if e.g. we wind up asking for last_set_value of a | |
10998 | SUBREG of the return value register). */ | |
10999 | return; | |
55310dad | 11000 | } |
230d793d | 11001 | |
84832317 | 11002 | note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn); |
230d793d | 11003 | } |
732f2ac9 | 11004 | |
732f2ac9 JJ |
11005 | /* If a SUBREG has the promoted bit set, it is in fact a property of the |
11006 | register present in the SUBREG, so for each such SUBREG go back and | |
11007 | adjust nonzero and sign bit information of the registers that are | |
11008 | known to have some zero/sign bits set. | |
11009 | ||
11010 | This is needed because when combine blows the SUBREGs away, the | |
11011 | information on zero/sign bits is lost and further combines can be | |
11012 | missed because of that. */ | |
11013 | ||
11014 | static void | |
79a490a9 | 11015 | record_promoted_value (rtx insn, rtx subreg) |
732f2ac9 | 11016 | { |
4a71b24f | 11017 | rtx links, set; |
770ae6cc | 11018 | unsigned int regno = REGNO (SUBREG_REG (subreg)); |
732f2ac9 JJ |
11019 | enum machine_mode mode = GET_MODE (subreg); |
11020 | ||
25af74a0 | 11021 | if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT) |
732f2ac9 JJ |
11022 | return; |
11023 | ||
663522cb | 11024 | for (links = LOG_LINKS (insn); links;) |
732f2ac9 JJ |
11025 | { |
11026 | insn = XEXP (links, 0); | |
11027 | set = single_set (insn); | |
11028 | ||
f8cfc6aa | 11029 | if (! set || !REG_P (SET_DEST (set)) |
732f2ac9 JJ |
11030 | || REGNO (SET_DEST (set)) != regno |
11031 | || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg))) | |
11032 | { | |
11033 | links = XEXP (links, 1); | |
11034 | continue; | |
11035 | } | |
11036 | ||
5eaad481 | 11037 | if (reg_stat[regno].last_set == insn) |
663522cb | 11038 | { |
7879b81e | 11039 | if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0) |
5eaad481 | 11040 | reg_stat[regno].last_set_nonzero_bits &= GET_MODE_MASK (mode); |
663522cb | 11041 | } |
732f2ac9 | 11042 | |
f8cfc6aa | 11043 | if (REG_P (SET_SRC (set))) |
732f2ac9 JJ |
11044 | { |
11045 | regno = REGNO (SET_SRC (set)); | |
11046 | links = LOG_LINKS (insn); | |
11047 | } | |
11048 | else | |
11049 | break; | |
11050 | } | |
11051 | } | |
11052 | ||
11053 | /* Scan X for promoted SUBREGs. For each one found, | |
11054 | note what it implies to the registers used in it. */ | |
11055 | ||
11056 | static void | |
79a490a9 | 11057 | check_promoted_subreg (rtx insn, rtx x) |
732f2ac9 JJ |
11058 | { |
11059 | if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x) | |
f8cfc6aa | 11060 | && REG_P (SUBREG_REG (x))) |
732f2ac9 JJ |
11061 | record_promoted_value (insn, x); |
11062 | else | |
11063 | { | |
11064 | const char *format = GET_RTX_FORMAT (GET_CODE (x)); | |
11065 | int i, j; | |
11066 | ||
11067 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++) | |
663522cb | 11068 | switch (format[i]) |
732f2ac9 JJ |
11069 | { |
11070 | case 'e': | |
11071 | check_promoted_subreg (insn, XEXP (x, i)); | |
11072 | break; | |
11073 | case 'V': | |
11074 | case 'E': | |
11075 | if (XVEC (x, i) != 0) | |
11076 | for (j = 0; j < XVECLEN (x, i); j++) | |
11077 | check_promoted_subreg (insn, XVECEXP (x, i, j)); | |
11078 | break; | |
11079 | } | |
11080 | } | |
11081 | } | |
230d793d RS |
11082 | \f |
11083 | /* Utility routine for the following function. Verify that all the registers | |
11084 | mentioned in *LOC are valid when *LOC was part of a value set when | |
11085 | label_tick == TICK. Return 0 if some are not. | |
11086 | ||
da7d8304 | 11087 | If REPLACE is nonzero, replace the invalid reference with |
230d793d RS |
11088 | (clobber (const_int 0)) and return 1. This replacement is useful because |
11089 | we often can get useful information about the form of a value (e.g., if | |
11090 | it was produced by a shift that always produces -1 or 0) even though | |
11091 | we don't know exactly what registers it was produced from. */ | |
11092 | ||
11093 | static int | |
79a490a9 | 11094 | get_last_value_validate (rtx *loc, rtx insn, int tick, int replace) |
230d793d RS |
11095 | { |
11096 | rtx x = *loc; | |
6f7d635c | 11097 | const char *fmt = GET_RTX_FORMAT (GET_CODE (x)); |
230d793d RS |
11098 | int len = GET_RTX_LENGTH (GET_CODE (x)); |
11099 | int i; | |
11100 | ||
f8cfc6aa | 11101 | if (REG_P (x)) |
230d793d | 11102 | { |
770ae6cc RK |
11103 | unsigned int regno = REGNO (x); |
11104 | unsigned int endregno | |
11105 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
66fd46b6 | 11106 | ? hard_regno_nregs[regno][GET_MODE (x)] : 1); |
770ae6cc | 11107 | unsigned int j; |
230d793d RS |
11108 | |
11109 | for (j = regno; j < endregno; j++) | |
5eaad481 | 11110 | if (reg_stat[j].last_set_invalid |
57cf50a4 GRK |
11111 | /* If this is a pseudo-register that was only set once and not |
11112 | live at the beginning of the function, it is always valid. */ | |
663522cb | 11113 | || (! (regno >= FIRST_PSEUDO_REGISTER |
57cf50a4 | 11114 | && REG_N_SETS (regno) == 1 |
770ae6cc | 11115 | && (! REGNO_REG_SET_P |
f6366fc7 | 11116 | (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno))) |
5eaad481 | 11117 | && reg_stat[j].last_set_label > tick)) |
230d793d RS |
11118 | { |
11119 | if (replace) | |
38a448ca | 11120 | *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
11121 | return replace; |
11122 | } | |
11123 | ||
11124 | return 1; | |
11125 | } | |
9a893315 JW |
11126 | /* If this is a memory reference, make sure that there were |
11127 | no stores after it that might have clobbered the value. We don't | |
11128 | have alias info, so we assume any store invalidates it. */ | |
389fdba0 | 11129 | else if (MEM_P (x) && !MEM_READONLY_P (x) |
9a893315 JW |
11130 | && INSN_CUID (insn) <= mem_last_set) |
11131 | { | |
11132 | if (replace) | |
38a448ca | 11133 | *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
9a893315 JW |
11134 | return replace; |
11135 | } | |
230d793d RS |
11136 | |
11137 | for (i = 0; i < len; i++) | |
8fd73754 AN |
11138 | { |
11139 | if (fmt[i] == 'e') | |
11140 | { | |
11141 | /* Check for identical subexpressions. If x contains | |
11142 | identical subexpression we only have to traverse one of | |
11143 | them. */ | |
ec8e098d | 11144 | if (i == 1 && ARITHMETIC_P (x)) |
8fd73754 AN |
11145 | { |
11146 | /* Note that at this point x0 has already been checked | |
11147 | and found valid. */ | |
11148 | rtx x0 = XEXP (x, 0); | |
11149 | rtx x1 = XEXP (x, 1); | |
11150 | ||
11151 | /* If x0 and x1 are identical then x is also valid. */ | |
11152 | if (x0 == x1) | |
11153 | return 1; | |
11154 | ||
11155 | /* If x1 is identical to a subexpression of x0 then | |
11156 | while checking x0, x1 has already been checked. Thus | |
11157 | it is valid and so as x. */ | |
ec8e098d | 11158 | if (ARITHMETIC_P (x0) |
8fd73754 AN |
11159 | && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1))) |
11160 | return 1; | |
11161 | ||
11162 | /* If x0 is identical to a subexpression of x1 then x is | |
11163 | valid iff the rest of x1 is valid. */ | |
ec8e098d | 11164 | if (ARITHMETIC_P (x1) |
8fd73754 AN |
11165 | && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1))) |
11166 | return | |
11167 | get_last_value_validate (&XEXP (x1, | |
11168 | x0 == XEXP (x1, 0) ? 1 : 0), | |
11169 | insn, tick, replace); | |
11170 | } | |
11171 | ||
11172 | if (get_last_value_validate (&XEXP (x, i), insn, tick, | |
11173 | replace) == 0) | |
11174 | return 0; | |
11175 | } | |
11176 | /* Don't bother with these. They shouldn't occur anyway. */ | |
11177 | else if (fmt[i] == 'E') | |
11178 | return 0; | |
11179 | } | |
230d793d RS |
11180 | |
11181 | /* If we haven't found a reason for it to be invalid, it is valid. */ | |
11182 | return 1; | |
11183 | } | |
11184 | ||
11185 | /* Get the last value assigned to X, if known. Some registers | |
11186 | in the value may be replaced with (clobber (const_int 0)) if their value | |
11187 | is known longer known reliably. */ | |
11188 | ||
11189 | static rtx | |
79a490a9 | 11190 | get_last_value (rtx x) |
230d793d | 11191 | { |
770ae6cc | 11192 | unsigned int regno; |
230d793d RS |
11193 | rtx value; |
11194 | ||
11195 | /* If this is a non-paradoxical SUBREG, get the value of its operand and | |
11196 | then convert it to the desired mode. If this is a paradoxical SUBREG, | |
0f41302f | 11197 | we cannot predict what values the "extra" bits might have. */ |
230d793d RS |
11198 | if (GET_CODE (x) == SUBREG |
11199 | && subreg_lowpart_p (x) | |
11200 | && (GET_MODE_SIZE (GET_MODE (x)) | |
11201 | <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
11202 | && (value = get_last_value (SUBREG_REG (x))) != 0) | |
4de249d9 | 11203 | return gen_lowpart (GET_MODE (x), value); |
230d793d | 11204 | |
f8cfc6aa | 11205 | if (!REG_P (x)) |
230d793d RS |
11206 | return 0; |
11207 | ||
11208 | regno = REGNO (x); | |
5eaad481 | 11209 | value = reg_stat[regno].last_set_value; |
230d793d | 11210 | |
57cf50a4 GRK |
11211 | /* If we don't have a value, or if it isn't for this basic block and |
11212 | it's either a hard register, set more than once, or it's a live | |
663522cb | 11213 | at the beginning of the function, return 0. |
57cf50a4 | 11214 | |
eaec9b3d | 11215 | Because if it's not live at the beginning of the function then the reg |
57cf50a4 GRK |
11216 | is always set before being used (is never used without being set). |
11217 | And, if it's set only once, and it's always set before use, then all | |
11218 | uses must have the same last value, even if it's not from this basic | |
11219 | block. */ | |
230d793d RS |
11220 | |
11221 | if (value == 0 | |
5eaad481 | 11222 | || (reg_stat[regno].last_set_label != label_tick |
57cf50a4 GRK |
11223 | && (regno < FIRST_PSEUDO_REGISTER |
11224 | || REG_N_SETS (regno) != 1 | |
770ae6cc | 11225 | || (REGNO_REG_SET_P |
f6366fc7 | 11226 | (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno))))) |
230d793d RS |
11227 | return 0; |
11228 | ||
4255220d | 11229 | /* If the value was set in a later insn than the ones we are processing, |
ca4cd906 | 11230 | we can't use it even if the register was only set once. */ |
5eaad481 | 11231 | if (INSN_CUID (reg_stat[regno].last_set) >= subst_low_cuid) |
ca4cd906 | 11232 | return 0; |
d0ab8cd3 RK |
11233 | |
11234 | /* If the value has all its registers valid, return it. */ | |
5eaad481 PB |
11235 | if (get_last_value_validate (&value, reg_stat[regno].last_set, |
11236 | reg_stat[regno].last_set_label, 0)) | |
230d793d RS |
11237 | return value; |
11238 | ||
11239 | /* Otherwise, make a copy and replace any invalid register with | |
11240 | (clobber (const_int 0)). If that fails for some reason, return 0. */ | |
11241 | ||
11242 | value = copy_rtx (value); | |
5eaad481 PB |
11243 | if (get_last_value_validate (&value, reg_stat[regno].last_set, |
11244 | reg_stat[regno].last_set_label, 1)) | |
230d793d RS |
11245 | return value; |
11246 | ||
11247 | return 0; | |
11248 | } | |
11249 | \f | |
11250 | /* Return nonzero if expression X refers to a REG or to memory | |
11251 | that is set in an instruction more recent than FROM_CUID. */ | |
11252 | ||
11253 | static int | |
79a490a9 | 11254 | use_crosses_set_p (rtx x, int from_cuid) |
230d793d | 11255 | { |
b3694847 SS |
11256 | const char *fmt; |
11257 | int i; | |
11258 | enum rtx_code code = GET_CODE (x); | |
230d793d RS |
11259 | |
11260 | if (code == REG) | |
11261 | { | |
770ae6cc RK |
11262 | unsigned int regno = REGNO (x); |
11263 | unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER | |
66fd46b6 | 11264 | ? hard_regno_nregs[regno][GET_MODE (x)] : 1); |
663522cb | 11265 | |
230d793d RS |
11266 | #ifdef PUSH_ROUNDING |
11267 | /* Don't allow uses of the stack pointer to be moved, | |
11268 | because we don't know whether the move crosses a push insn. */ | |
f73ad30e | 11269 | if (regno == STACK_POINTER_REGNUM && PUSH_ARGS) |
230d793d RS |
11270 | return 1; |
11271 | #endif | |
770ae6cc | 11272 | for (; regno < endreg; regno++) |
5eaad481 PB |
11273 | if (reg_stat[regno].last_set |
11274 | && INSN_CUID (reg_stat[regno].last_set) > from_cuid) | |
e28f5732 RK |
11275 | return 1; |
11276 | return 0; | |
230d793d RS |
11277 | } |
11278 | ||
11279 | if (code == MEM && mem_last_set > from_cuid) | |
11280 | return 1; | |
11281 | ||
11282 | fmt = GET_RTX_FORMAT (code); | |
11283 | ||
11284 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
11285 | { | |
11286 | if (fmt[i] == 'E') | |
11287 | { | |
b3694847 | 11288 | int j; |
230d793d RS |
11289 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
11290 | if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid)) | |
11291 | return 1; | |
11292 | } | |
11293 | else if (fmt[i] == 'e' | |
11294 | && use_crosses_set_p (XEXP (x, i), from_cuid)) | |
11295 | return 1; | |
11296 | } | |
11297 | return 0; | |
11298 | } | |
11299 | \f | |
11300 | /* Define three variables used for communication between the following | |
11301 | routines. */ | |
11302 | ||
770ae6cc | 11303 | static unsigned int reg_dead_regno, reg_dead_endregno; |
230d793d RS |
11304 | static int reg_dead_flag; |
11305 | ||
11306 | /* Function called via note_stores from reg_dead_at_p. | |
11307 | ||
663522cb | 11308 | If DEST is within [reg_dead_regno, reg_dead_endregno), set |
230d793d RS |
11309 | reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */ |
11310 | ||
11311 | static void | |
79a490a9 | 11312 | reg_dead_at_p_1 (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED) |
230d793d | 11313 | { |
770ae6cc | 11314 | unsigned int regno, endregno; |
230d793d | 11315 | |
f8cfc6aa | 11316 | if (!REG_P (dest)) |
230d793d RS |
11317 | return; |
11318 | ||
11319 | regno = REGNO (dest); | |
663522cb | 11320 | endregno = regno + (regno < FIRST_PSEUDO_REGISTER |
66fd46b6 | 11321 | ? hard_regno_nregs[regno][GET_MODE (dest)] : 1); |
230d793d RS |
11322 | |
11323 | if (reg_dead_endregno > regno && reg_dead_regno < endregno) | |
11324 | reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1; | |
11325 | } | |
11326 | ||
da7d8304 | 11327 | /* Return nonzero if REG is known to be dead at INSN. |
230d793d RS |
11328 | |
11329 | We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER | |
11330 | referencing REG, it is dead. If we hit a SET referencing REG, it is | |
11331 | live. Otherwise, see if it is live or dead at the start of the basic | |
6e25d159 RK |
11332 | block we are in. Hard regs marked as being live in NEWPAT_USED_REGS |
11333 | must be assumed to be always live. */ | |
230d793d RS |
11334 | |
11335 | static int | |
79a490a9 | 11336 | reg_dead_at_p (rtx reg, rtx insn) |
230d793d | 11337 | { |
e0082a72 | 11338 | basic_block block; |
770ae6cc | 11339 | unsigned int i; |
230d793d RS |
11340 | |
11341 | /* Set variables for reg_dead_at_p_1. */ | |
11342 | reg_dead_regno = REGNO (reg); | |
11343 | reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER | |
66fd46b6 JH |
11344 | ? hard_regno_nregs[reg_dead_regno] |
11345 | [GET_MODE (reg)] | |
230d793d RS |
11346 | : 1); |
11347 | ||
11348 | reg_dead_flag = 0; | |
11349 | ||
45da19e3 UW |
11350 | /* Check that reg isn't mentioned in NEWPAT_USED_REGS. For fixed registers |
11351 | we allow the machine description to decide whether use-and-clobber | |
11352 | patterns are OK. */ | |
6e25d159 RK |
11353 | if (reg_dead_regno < FIRST_PSEUDO_REGISTER) |
11354 | { | |
11355 | for (i = reg_dead_regno; i < reg_dead_endregno; i++) | |
45da19e3 | 11356 | if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i)) |
6e25d159 RK |
11357 | return 0; |
11358 | } | |
11359 | ||
230d793d RS |
11360 | /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or |
11361 | beginning of function. */ | |
4b4bf941 | 11362 | for (; insn && !LABEL_P (insn) && !BARRIER_P (insn); |
230d793d RS |
11363 | insn = prev_nonnote_insn (insn)) |
11364 | { | |
84832317 | 11365 | note_stores (PATTERN (insn), reg_dead_at_p_1, NULL); |
230d793d RS |
11366 | if (reg_dead_flag) |
11367 | return reg_dead_flag == 1 ? 1 : 0; | |
11368 | ||
11369 | if (find_regno_note (insn, REG_DEAD, reg_dead_regno)) | |
11370 | return 1; | |
11371 | } | |
11372 | ||
e0082a72 | 11373 | /* Get the basic block that we were in. */ |
230d793d | 11374 | if (insn == 0) |
e0082a72 | 11375 | block = ENTRY_BLOCK_PTR->next_bb; |
230d793d RS |
11376 | else |
11377 | { | |
e0082a72 | 11378 | FOR_EACH_BB (block) |
a813c111 | 11379 | if (insn == BB_HEAD (block)) |
230d793d RS |
11380 | break; |
11381 | ||
e0082a72 | 11382 | if (block == EXIT_BLOCK_PTR) |
230d793d RS |
11383 | return 0; |
11384 | } | |
11385 | ||
11386 | for (i = reg_dead_regno; i < reg_dead_endregno; i++) | |
e0082a72 | 11387 | if (REGNO_REG_SET_P (block->global_live_at_start, i)) |
230d793d RS |
11388 | return 0; |
11389 | ||
11390 | return 1; | |
11391 | } | |
6e25d159 RK |
11392 | \f |
11393 | /* Note hard registers in X that are used. This code is similar to | |
11394 | that in flow.c, but much simpler since we don't care about pseudos. */ | |
11395 | ||
11396 | static void | |
79a490a9 | 11397 | mark_used_regs_combine (rtx x) |
6e25d159 | 11398 | { |
770ae6cc RK |
11399 | RTX_CODE code = GET_CODE (x); |
11400 | unsigned int regno; | |
6e25d159 RK |
11401 | int i; |
11402 | ||
11403 | switch (code) | |
11404 | { | |
11405 | case LABEL_REF: | |
11406 | case SYMBOL_REF: | |
11407 | case CONST_INT: | |
11408 | case CONST: | |
11409 | case CONST_DOUBLE: | |
69ef87e2 | 11410 | case CONST_VECTOR: |
6e25d159 RK |
11411 | case PC: |
11412 | case ADDR_VEC: | |
11413 | case ADDR_DIFF_VEC: | |
11414 | case ASM_INPUT: | |
11415 | #ifdef HAVE_cc0 | |
11416 | /* CC0 must die in the insn after it is set, so we don't need to take | |
11417 | special note of it here. */ | |
11418 | case CC0: | |
11419 | #endif | |
11420 | return; | |
11421 | ||
11422 | case CLOBBER: | |
11423 | /* If we are clobbering a MEM, mark any hard registers inside the | |
11424 | address as used. */ | |
3c0cb5de | 11425 | if (MEM_P (XEXP (x, 0))) |
6e25d159 RK |
11426 | mark_used_regs_combine (XEXP (XEXP (x, 0), 0)); |
11427 | return; | |
11428 | ||
11429 | case REG: | |
11430 | regno = REGNO (x); | |
11431 | /* A hard reg in a wide mode may really be multiple registers. | |
11432 | If so, mark all of them just like the first. */ | |
11433 | if (regno < FIRST_PSEUDO_REGISTER) | |
11434 | { | |
770ae6cc RK |
11435 | unsigned int endregno, r; |
11436 | ||
3eae4643 | 11437 | /* None of this applies to the stack, frame or arg pointers. */ |
6e25d159 RK |
11438 | if (regno == STACK_POINTER_REGNUM |
11439 | #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | |
11440 | || regno == HARD_FRAME_POINTER_REGNUM | |
11441 | #endif | |
11442 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
11443 | || (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) | |
11444 | #endif | |
11445 | || regno == FRAME_POINTER_REGNUM) | |
11446 | return; | |
11447 | ||
66fd46b6 | 11448 | endregno = regno + hard_regno_nregs[regno][GET_MODE (x)]; |
770ae6cc RK |
11449 | for (r = regno; r < endregno; r++) |
11450 | SET_HARD_REG_BIT (newpat_used_regs, r); | |
6e25d159 RK |
11451 | } |
11452 | return; | |
11453 | ||
11454 | case SET: | |
11455 | { | |
11456 | /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in | |
11457 | the address. */ | |
b3694847 | 11458 | rtx testreg = SET_DEST (x); |
6e25d159 | 11459 | |
e048778f RK |
11460 | while (GET_CODE (testreg) == SUBREG |
11461 | || GET_CODE (testreg) == ZERO_EXTRACT | |
e048778f | 11462 | || GET_CODE (testreg) == STRICT_LOW_PART) |
6e25d159 RK |
11463 | testreg = XEXP (testreg, 0); |
11464 | ||
3c0cb5de | 11465 | if (MEM_P (testreg)) |
6e25d159 RK |
11466 | mark_used_regs_combine (XEXP (testreg, 0)); |
11467 | ||
11468 | mark_used_regs_combine (SET_SRC (x)); | |
6e25d159 | 11469 | } |
e9a25f70 JL |
11470 | return; |
11471 | ||
11472 | default: | |
11473 | break; | |
6e25d159 RK |
11474 | } |
11475 | ||
11476 | /* Recursively scan the operands of this expression. */ | |
11477 | ||
11478 | { | |
b3694847 | 11479 | const char *fmt = GET_RTX_FORMAT (code); |
6e25d159 RK |
11480 | |
11481 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
11482 | { | |
663522cb | 11483 | if (fmt[i] == 'e') |
6e25d159 | 11484 | mark_used_regs_combine (XEXP (x, i)); |
663522cb KH |
11485 | else if (fmt[i] == 'E') |
11486 | { | |
b3694847 | 11487 | int j; |
6e25d159 | 11488 | |
663522cb KH |
11489 | for (j = 0; j < XVECLEN (x, i); j++) |
11490 | mark_used_regs_combine (XVECEXP (x, i, j)); | |
11491 | } | |
6e25d159 RK |
11492 | } |
11493 | } | |
11494 | } | |
230d793d RS |
11495 | \f |
11496 | /* Remove register number REGNO from the dead registers list of INSN. | |
11497 | ||
11498 | Return the note used to record the death, if there was one. */ | |
11499 | ||
11500 | rtx | |
79a490a9 | 11501 | remove_death (unsigned int regno, rtx insn) |
230d793d | 11502 | { |
b3694847 | 11503 | rtx note = find_regno_note (insn, REG_DEAD, regno); |
230d793d RS |
11504 | |
11505 | if (note) | |
1a26b032 | 11506 | { |
b1f21e0a | 11507 | REG_N_DEATHS (regno)--; |
1a26b032 RK |
11508 | remove_note (insn, note); |
11509 | } | |
230d793d RS |
11510 | |
11511 | return note; | |
11512 | } | |
11513 | ||
11514 | /* For each register (hardware or pseudo) used within expression X, if its | |
11515 | death is in an instruction with cuid between FROM_CUID (inclusive) and | |
11516 | TO_INSN (exclusive), put a REG_DEAD note for that register in the | |
663522cb | 11517 | list headed by PNOTES. |
230d793d | 11518 | |
6eb12cef RK |
11519 | That said, don't move registers killed by maybe_kill_insn. |
11520 | ||
230d793d RS |
11521 | This is done when X is being merged by combination into TO_INSN. These |
11522 | notes will then be distributed as needed. */ | |
11523 | ||
11524 | static void | |
79a490a9 AJ |
11525 | move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn, |
11526 | rtx *pnotes) | |
230d793d | 11527 | { |
b3694847 SS |
11528 | const char *fmt; |
11529 | int len, i; | |
11530 | enum rtx_code code = GET_CODE (x); | |
230d793d RS |
11531 | |
11532 | if (code == REG) | |
11533 | { | |
770ae6cc | 11534 | unsigned int regno = REGNO (x); |
5eaad481 | 11535 | rtx where_dead = reg_stat[regno].last_death; |
b3694847 | 11536 | rtx before_dead, after_dead; |
e340018d | 11537 | |
3eae4643 | 11538 | /* Don't move the register if it gets killed in between from and to. */ |
6eb12cef | 11539 | if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn) |
770ae6cc | 11540 | && ! reg_referenced_p (x, maybe_kill_insn)) |
6eb12cef RK |
11541 | return; |
11542 | ||
e340018d JW |
11543 | /* WHERE_DEAD could be a USE insn made by combine, so first we |
11544 | make sure that we have insns with valid INSN_CUID values. */ | |
11545 | before_dead = where_dead; | |
11546 | while (before_dead && INSN_UID (before_dead) > max_uid_cuid) | |
11547 | before_dead = PREV_INSN (before_dead); | |
770ae6cc | 11548 | |
e340018d JW |
11549 | after_dead = where_dead; |
11550 | while (after_dead && INSN_UID (after_dead) > max_uid_cuid) | |
11551 | after_dead = NEXT_INSN (after_dead); | |
11552 | ||
11553 | if (before_dead && after_dead | |
11554 | && INSN_CUID (before_dead) >= from_cuid | |
11555 | && (INSN_CUID (after_dead) < INSN_CUID (to_insn) | |
11556 | || (where_dead != after_dead | |
11557 | && INSN_CUID (after_dead) == INSN_CUID (to_insn)))) | |
230d793d | 11558 | { |
dbc131f3 | 11559 | rtx note = remove_death (regno, where_dead); |
230d793d RS |
11560 | |
11561 | /* It is possible for the call above to return 0. This can occur | |
5eaad481 | 11562 | when last_death points to I2 or I1 that we combined with. |
dbc131f3 RK |
11563 | In that case make a new note. |
11564 | ||
11565 | We must also check for the case where X is a hard register | |
11566 | and NOTE is a death note for a range of hard registers | |
11567 | including X. In that case, we must put REG_DEAD notes for | |
11568 | the remaining registers in place of NOTE. */ | |
11569 | ||
11570 | if (note != 0 && regno < FIRST_PSEUDO_REGISTER | |
11571 | && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0))) | |
24e46fc4 | 11572 | > GET_MODE_SIZE (GET_MODE (x)))) |
dbc131f3 | 11573 | { |
770ae6cc RK |
11574 | unsigned int deadregno = REGNO (XEXP (note, 0)); |
11575 | unsigned int deadend | |
66fd46b6 JH |
11576 | = (deadregno + hard_regno_nregs[deadregno] |
11577 | [GET_MODE (XEXP (note, 0))]); | |
770ae6cc | 11578 | unsigned int ourend |
66fd46b6 | 11579 | = regno + hard_regno_nregs[regno][GET_MODE (x)]; |
770ae6cc | 11580 | unsigned int i; |
dbc131f3 RK |
11581 | |
11582 | for (i = deadregno; i < deadend; i++) | |
11583 | if (i < regno || i >= ourend) | |
11584 | REG_NOTES (where_dead) | |
38a448ca | 11585 | = gen_rtx_EXPR_LIST (REG_DEAD, |
e50126e8 | 11586 | regno_reg_rtx[i], |
38a448ca | 11587 | REG_NOTES (where_dead)); |
dbc131f3 | 11588 | } |
770ae6cc | 11589 | |
24e46fc4 JW |
11590 | /* If we didn't find any note, or if we found a REG_DEAD note that |
11591 | covers only part of the given reg, and we have a multi-reg hard | |
fabd69e8 RK |
11592 | register, then to be safe we must check for REG_DEAD notes |
11593 | for each register other than the first. They could have | |
11594 | their own REG_DEAD notes lying around. */ | |
24e46fc4 JW |
11595 | else if ((note == 0 |
11596 | || (note != 0 | |
11597 | && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0))) | |
11598 | < GET_MODE_SIZE (GET_MODE (x))))) | |
11599 | && regno < FIRST_PSEUDO_REGISTER | |
66fd46b6 | 11600 | && hard_regno_nregs[regno][GET_MODE (x)] > 1) |
fabd69e8 | 11601 | { |
770ae6cc | 11602 | unsigned int ourend |
66fd46b6 | 11603 | = regno + hard_regno_nregs[regno][GET_MODE (x)]; |
770ae6cc | 11604 | unsigned int i, offset; |
fabd69e8 RK |
11605 | rtx oldnotes = 0; |
11606 | ||
24e46fc4 | 11607 | if (note) |
66fd46b6 | 11608 | offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))]; |
24e46fc4 JW |
11609 | else |
11610 | offset = 1; | |
11611 | ||
11612 | for (i = regno + offset; i < ourend; i++) | |
e50126e8 | 11613 | move_deaths (regno_reg_rtx[i], |
6eb12cef | 11614 | maybe_kill_insn, from_cuid, to_insn, &oldnotes); |
fabd69e8 | 11615 | } |
230d793d | 11616 | |
dbc131f3 | 11617 | if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x)) |
230d793d RS |
11618 | { |
11619 | XEXP (note, 1) = *pnotes; | |
11620 | *pnotes = note; | |
11621 | } | |
11622 | else | |
38a448ca | 11623 | *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes); |
1a26b032 | 11624 | |
b1f21e0a | 11625 | REG_N_DEATHS (regno)++; |
230d793d RS |
11626 | } |
11627 | ||
11628 | return; | |
11629 | } | |
11630 | ||
11631 | else if (GET_CODE (x) == SET) | |
11632 | { | |
11633 | rtx dest = SET_DEST (x); | |
11634 | ||
6eb12cef | 11635 | move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes); |
230d793d | 11636 | |
a7c99304 RK |
11637 | /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG |
11638 | that accesses one word of a multi-word item, some | |
11639 | piece of everything register in the expression is used by | |
11640 | this insn, so remove any old death. */ | |
ddef6bc7 | 11641 | /* ??? So why do we test for equality of the sizes? */ |
a7c99304 RK |
11642 | |
11643 | if (GET_CODE (dest) == ZERO_EXTRACT | |
11644 | || GET_CODE (dest) == STRICT_LOW_PART | |
11645 | || (GET_CODE (dest) == SUBREG | |
11646 | && (((GET_MODE_SIZE (GET_MODE (dest)) | |
11647 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD) | |
11648 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
11649 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))) | |
230d793d | 11650 | { |
6eb12cef | 11651 | move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes); |
a7c99304 | 11652 | return; |
230d793d RS |
11653 | } |
11654 | ||
a7c99304 RK |
11655 | /* If this is some other SUBREG, we know it replaces the entire |
11656 | value, so use that as the destination. */ | |
11657 | if (GET_CODE (dest) == SUBREG) | |
11658 | dest = SUBREG_REG (dest); | |
11659 | ||
11660 | /* If this is a MEM, adjust deaths of anything used in the address. | |
11661 | For a REG (the only other possibility), the entire value is | |
11662 | being replaced so the old value is not used in this insn. */ | |
230d793d | 11663 | |
3c0cb5de | 11664 | if (MEM_P (dest)) |
6eb12cef RK |
11665 | move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid, |
11666 | to_insn, pnotes); | |
230d793d RS |
11667 | return; |
11668 | } | |
11669 | ||
11670 | else if (GET_CODE (x) == CLOBBER) | |
11671 | return; | |
11672 | ||
11673 | len = GET_RTX_LENGTH (code); | |
11674 | fmt = GET_RTX_FORMAT (code); | |
11675 | ||
11676 | for (i = 0; i < len; i++) | |
11677 | { | |
11678 | if (fmt[i] == 'E') | |
11679 | { | |
b3694847 | 11680 | int j; |
230d793d | 11681 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
6eb12cef RK |
11682 | move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid, |
11683 | to_insn, pnotes); | |
230d793d RS |
11684 | } |
11685 | else if (fmt[i] == 'e') | |
6eb12cef | 11686 | move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes); |
230d793d RS |
11687 | } |
11688 | } | |
11689 | \f | |
a7c99304 RK |
11690 | /* Return 1 if X is the target of a bit-field assignment in BODY, the |
11691 | pattern of an insn. X must be a REG. */ | |
230d793d RS |
11692 | |
11693 | static int | |
79a490a9 | 11694 | reg_bitfield_target_p (rtx x, rtx body) |
230d793d RS |
11695 | { |
11696 | int i; | |
11697 | ||
11698 | if (GET_CODE (body) == SET) | |
a7c99304 RK |
11699 | { |
11700 | rtx dest = SET_DEST (body); | |
11701 | rtx target; | |
770ae6cc | 11702 | unsigned int regno, tregno, endregno, endtregno; |
a7c99304 RK |
11703 | |
11704 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
11705 | target = XEXP (dest, 0); | |
11706 | else if (GET_CODE (dest) == STRICT_LOW_PART) | |
11707 | target = SUBREG_REG (XEXP (dest, 0)); | |
11708 | else | |
11709 | return 0; | |
11710 | ||
11711 | if (GET_CODE (target) == SUBREG) | |
11712 | target = SUBREG_REG (target); | |
11713 | ||
f8cfc6aa | 11714 | if (!REG_P (target)) |
a7c99304 RK |
11715 | return 0; |
11716 | ||
11717 | tregno = REGNO (target), regno = REGNO (x); | |
11718 | if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER) | |
11719 | return target == x; | |
11720 | ||
66fd46b6 JH |
11721 | endtregno = tregno + hard_regno_nregs[tregno][GET_MODE (target)]; |
11722 | endregno = regno + hard_regno_nregs[regno][GET_MODE (x)]; | |
a7c99304 RK |
11723 | |
11724 | return endregno > tregno && regno < endtregno; | |
11725 | } | |
230d793d RS |
11726 | |
11727 | else if (GET_CODE (body) == PARALLEL) | |
11728 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
a7c99304 | 11729 | if (reg_bitfield_target_p (x, XVECEXP (body, 0, i))) |
230d793d RS |
11730 | return 1; |
11731 | ||
11732 | return 0; | |
663522cb | 11733 | } |
230d793d RS |
11734 | \f |
11735 | /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them | |
11736 | as appropriate. I3 and I2 are the insns resulting from the combination | |
11737 | insns including FROM (I2 may be zero). | |
11738 | ||
230d793d RS |
11739 | Each note in the list is either ignored or placed on some insns, depending |
11740 | on the type of note. */ | |
11741 | ||
11742 | static void | |
79a490a9 | 11743 | distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2) |
230d793d RS |
11744 | { |
11745 | rtx note, next_note; | |
11746 | rtx tem; | |
11747 | ||
11748 | for (note = notes; note; note = next_note) | |
11749 | { | |
11750 | rtx place = 0, place2 = 0; | |
11751 | ||
11752 | /* If this NOTE references a pseudo register, ensure it references | |
11753 | the latest copy of that register. */ | |
f8cfc6aa | 11754 | if (XEXP (note, 0) && REG_P (XEXP (note, 0)) |
230d793d RS |
11755 | && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER) |
11756 | XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))]; | |
11757 | ||
11758 | next_note = XEXP (note, 1); | |
11759 | switch (REG_NOTE_KIND (note)) | |
11760 | { | |
c9903b44 | 11761 | case REG_BR_PROB: |
4db384c9 | 11762 | case REG_BR_PRED: |
c9903b44 DE |
11763 | /* Doesn't matter much where we put this, as long as it's somewhere. |
11764 | It is preferable to keep these notes on branches, which is most | |
11765 | likely to be i3. */ | |
4a8d0c9c RH |
11766 | place = i3; |
11767 | break; | |
11768 | ||
6e885ee3 ZD |
11769 | case REG_VALUE_PROFILE: |
11770 | /* Just get rid of this note, as it is unused later anyway. */ | |
11771 | break; | |
11772 | ||
f7cfa78d | 11773 | case REG_NON_LOCAL_GOTO: |
4b4bf941 | 11774 | if (JUMP_P (i3)) |
f7cfa78d | 11775 | place = i3; |
f7cfa78d | 11776 | else |
341c100f NS |
11777 | { |
11778 | gcc_assert (i2 && JUMP_P (i2)); | |
11779 | place = i2; | |
11780 | } | |
f7cfa78d GS |
11781 | break; |
11782 | ||
4b7c585f | 11783 | case REG_EH_REGION: |
662795a8 | 11784 | /* These notes must remain with the call or trapping instruction. */ |
4b4bf941 | 11785 | if (CALL_P (i3)) |
662795a8 | 11786 | place = i3; |
4b4bf941 | 11787 | else if (i2 && CALL_P (i2)) |
662795a8 | 11788 | place = i2; |
341c100f | 11789 | else |
662795a8 | 11790 | { |
341c100f | 11791 | gcc_assert (flag_non_call_exceptions); |
662795a8 RH |
11792 | if (may_trap_p (i3)) |
11793 | place = i3; | |
11794 | else if (i2 && may_trap_p (i2)) | |
11795 | place = i2; | |
11796 | /* ??? Otherwise assume we've combined things such that we | |
11797 | can now prove that the instructions can't trap. Drop the | |
11798 | note in this case. */ | |
11799 | } | |
662795a8 RH |
11800 | break; |
11801 | ||
b01e3e38 | 11802 | case REG_ALWAYS_RETURN: |
ca3920ad | 11803 | case REG_NORETURN: |
ab61c93f | 11804 | case REG_SETJMP: |
0e403ec3 AS |
11805 | /* These notes must remain with the call. It should not be |
11806 | possible for both I2 and I3 to be a call. */ | |
4b4bf941 | 11807 | if (CALL_P (i3)) |
4b7c585f | 11808 | place = i3; |
4b7c585f | 11809 | else |
341c100f NS |
11810 | { |
11811 | gcc_assert (i2 && CALL_P (i2)); | |
11812 | place = i2; | |
11813 | } | |
4b7c585f JL |
11814 | break; |
11815 | ||
230d793d | 11816 | case REG_UNUSED: |
07d0cbdd | 11817 | /* Any clobbers for i3 may still exist, and so we must process |
176c9e6b JW |
11818 | REG_UNUSED notes from that insn. |
11819 | ||
11820 | Any clobbers from i2 or i1 can only exist if they were added by | |
11821 | recog_for_combine. In that case, recog_for_combine created the | |
11822 | necessary REG_UNUSED notes. Trying to keep any original | |
11823 | REG_UNUSED notes from these insns can cause incorrect output | |
11824 | if it is for the same register as the original i3 dest. | |
11825 | In that case, we will notice that the register is set in i3, | |
11826 | and then add a REG_UNUSED note for the destination of i3, which | |
07d0cbdd JW |
11827 | is wrong. However, it is possible to have REG_UNUSED notes from |
11828 | i2 or i1 for register which were both used and clobbered, so | |
11829 | we keep notes from i2 or i1 if they will turn into REG_DEAD | |
11830 | notes. */ | |
176c9e6b | 11831 | |
230d793d RS |
11832 | /* If this register is set or clobbered in I3, put the note there |
11833 | unless there is one already. */ | |
07d0cbdd | 11834 | if (reg_set_p (XEXP (note, 0), PATTERN (i3))) |
230d793d | 11835 | { |
07d0cbdd JW |
11836 | if (from_insn != i3) |
11837 | break; | |
11838 | ||
f8cfc6aa | 11839 | if (! (REG_P (XEXP (note, 0)) |
230d793d RS |
11840 | ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0))) |
11841 | : find_reg_note (i3, REG_UNUSED, XEXP (note, 0)))) | |
11842 | place = i3; | |
11843 | } | |
11844 | /* Otherwise, if this register is used by I3, then this register | |
11845 | now dies here, so we must put a REG_DEAD note here unless there | |
11846 | is one already. */ | |
11847 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)) | |
f8cfc6aa | 11848 | && ! (REG_P (XEXP (note, 0)) |
770ae6cc RK |
11849 | ? find_regno_note (i3, REG_DEAD, |
11850 | REGNO (XEXP (note, 0))) | |
230d793d RS |
11851 | : find_reg_note (i3, REG_DEAD, XEXP (note, 0)))) |
11852 | { | |
11853 | PUT_REG_NOTE_KIND (note, REG_DEAD); | |
11854 | place = i3; | |
11855 | } | |
11856 | break; | |
11857 | ||
11858 | case REG_EQUAL: | |
11859 | case REG_EQUIV: | |
9ae8ffe7 | 11860 | case REG_NOALIAS: |
230d793d RS |
11861 | /* These notes say something about results of an insn. We can |
11862 | only support them if they used to be on I3 in which case they | |
a687e897 RK |
11863 | remain on I3. Otherwise they are ignored. |
11864 | ||
11865 | If the note refers to an expression that is not a constant, we | |
11866 | must also ignore the note since we cannot tell whether the | |
11867 | equivalence is still true. It might be possible to do | |
11868 | slightly better than this (we only have a problem if I2DEST | |
11869 | or I1DEST is present in the expression), but it doesn't | |
11870 | seem worth the trouble. */ | |
11871 | ||
11872 | if (from_insn == i3 | |
11873 | && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0)))) | |
230d793d RS |
11874 | place = i3; |
11875 | break; | |
11876 | ||
11877 | case REG_INC: | |
11878 | case REG_NO_CONFLICT: | |
230d793d RS |
11879 | /* These notes say something about how a register is used. They must |
11880 | be present on any use of the register in I2 or I3. */ | |
11881 | if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))) | |
11882 | place = i3; | |
11883 | ||
11884 | if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2))) | |
11885 | { | |
11886 | if (place) | |
11887 | place2 = i2; | |
11888 | else | |
11889 | place = i2; | |
11890 | } | |
11891 | break; | |
11892 | ||
e55b4486 RH |
11893 | case REG_LABEL: |
11894 | /* This can show up in several ways -- either directly in the | |
11895 | pattern, or hidden off in the constant pool with (or without?) | |
11896 | a REG_EQUAL note. */ | |
11897 | /* ??? Ignore the without-reg_equal-note problem for now. */ | |
11898 | if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)) | |
11899 | || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX)) | |
11900 | && GET_CODE (XEXP (tem, 0)) == LABEL_REF | |
11901 | && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))) | |
11902 | place = i3; | |
11903 | ||
11904 | if (i2 | |
11905 | && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2)) | |
663522cb | 11906 | || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX)) |
e55b4486 RH |
11907 | && GET_CODE (XEXP (tem, 0)) == LABEL_REF |
11908 | && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))) | |
11909 | { | |
11910 | if (place) | |
11911 | place2 = i2; | |
11912 | else | |
11913 | place = i2; | |
11914 | } | |
2a3b43b6 | 11915 | |
b54567e2 RZ |
11916 | /* Don't attach REG_LABEL note to a JUMP_INSN. Add |
11917 | a JUMP_LABEL instead or decrement LABEL_NUSES. */ | |
11918 | if (place && JUMP_P (place)) | |
2a3b43b6 | 11919 | { |
341c100f NS |
11920 | rtx label = JUMP_LABEL (place); |
11921 | ||
11922 | if (!label) | |
b54567e2 | 11923 | JUMP_LABEL (place) = XEXP (note, 0); |
341c100f NS |
11924 | else |
11925 | { | |
11926 | gcc_assert (label == XEXP (note, 0)); | |
11927 | if (LABEL_P (label)) | |
11928 | LABEL_NUSES (label)--; | |
11929 | } | |
2a3b43b6 JJ |
11930 | place = 0; |
11931 | } | |
b54567e2 | 11932 | if (place2 && JUMP_P (place2)) |
2a3b43b6 | 11933 | { |
341c100f NS |
11934 | rtx label = JUMP_LABEL (place2); |
11935 | ||
11936 | if (!label) | |
b54567e2 | 11937 | JUMP_LABEL (place2) = XEXP (note, 0); |
341c100f NS |
11938 | else |
11939 | { | |
11940 | gcc_assert (label == XEXP (note, 0)); | |
11941 | if (LABEL_P (label)) | |
11942 | LABEL_NUSES (label)--; | |
11943 | } | |
2a3b43b6 JJ |
11944 | place2 = 0; |
11945 | } | |
e55b4486 RH |
11946 | break; |
11947 | ||
c1194d74 | 11948 | case REG_NONNEG: |
6001794d | 11949 | /* This note says something about the value of a register prior |
c1194d74 JW |
11950 | to the execution of an insn. It is too much trouble to see |
11951 | if the note is still correct in all situations. It is better | |
11952 | to simply delete it. */ | |
230d793d RS |
11953 | break; |
11954 | ||
11955 | case REG_RETVAL: | |
11956 | /* If the insn previously containing this note still exists, | |
11957 | put it back where it was. Otherwise move it to the previous | |
11958 | insn. Adjust the corresponding REG_LIBCALL note. */ | |
4b4bf941 | 11959 | if (!NOTE_P (from_insn)) |
230d793d RS |
11960 | place = from_insn; |
11961 | else | |
11962 | { | |
5f4f0e22 | 11963 | tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX); |
230d793d RS |
11964 | place = prev_real_insn (from_insn); |
11965 | if (tem && place) | |
11966 | XEXP (tem, 0) = place; | |
c71e1201 AO |
11967 | /* If we're deleting the last remaining instruction of a |
11968 | libcall sequence, don't add the notes. */ | |
11969 | else if (XEXP (note, 0) == from_insn) | |
11970 | tem = place = 0; | |
e51f9159 KK |
11971 | /* Don't add the dangling REG_RETVAL note. */ |
11972 | else if (! tem) | |
11973 | place = 0; | |
230d793d RS |
11974 | } |
11975 | break; | |
11976 | ||
11977 | case REG_LIBCALL: | |
11978 | /* This is handled similarly to REG_RETVAL. */ | |
4b4bf941 | 11979 | if (!NOTE_P (from_insn)) |
230d793d RS |
11980 | place = from_insn; |
11981 | else | |
11982 | { | |
5f4f0e22 | 11983 | tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX); |
230d793d RS |
11984 | place = next_real_insn (from_insn); |
11985 | if (tem && place) | |
11986 | XEXP (tem, 0) = place; | |
c71e1201 AO |
11987 | /* If we're deleting the last remaining instruction of a |
11988 | libcall sequence, don't add the notes. */ | |
11989 | else if (XEXP (note, 0) == from_insn) | |
11990 | tem = place = 0; | |
e51f9159 KK |
11991 | /* Don't add the dangling REG_LIBCALL note. */ |
11992 | else if (! tem) | |
11993 | place = 0; | |
230d793d RS |
11994 | } |
11995 | break; | |
11996 | ||
11997 | case REG_DEAD: | |
11998 | /* If the register is used as an input in I3, it dies there. | |
da7d8304 | 11999 | Similarly for I2, if it is nonzero and adjacent to I3. |
230d793d RS |
12000 | |
12001 | If the register is not used as an input in either I3 or I2 | |
12002 | and it is not one of the registers we were supposed to eliminate, | |
12003 | there are two possibilities. We might have a non-adjacent I2 | |
12004 | or we might have somehow eliminated an additional register | |
12005 | from a computation. For example, we might have had A & B where | |
12006 | we discover that B will always be zero. In this case we will | |
12007 | eliminate the reference to A. | |
12008 | ||
12009 | In both cases, we must search to see if we can find a previous | |
12010 | use of A and put the death note there. */ | |
12011 | ||
6e2d1486 | 12012 | if (from_insn |
4b4bf941 | 12013 | && CALL_P (from_insn) |
663522cb | 12014 | && find_reg_fusage (from_insn, USE, XEXP (note, 0))) |
6e2d1486 RK |
12015 | place = from_insn; |
12016 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))) | |
230d793d RS |
12017 | place = i3; |
12018 | else if (i2 != 0 && next_nonnote_insn (i2) == i3 | |
12019 | && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) | |
12020 | place = i2; | |
12021 | ||
230d793d | 12022 | if (place == 0) |
38d8473f | 12023 | { |
f6366fc7 | 12024 | basic_block bb = this_basic_block; |
d3a923ee RH |
12025 | |
12026 | for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem)) | |
38d8473f | 12027 | { |
2c3c49de | 12028 | if (! INSN_P (tem)) |
d3a923ee | 12029 | { |
a813c111 | 12030 | if (tem == BB_HEAD (bb)) |
d3a923ee RH |
12031 | break; |
12032 | continue; | |
12033 | } | |
12034 | ||
38d8473f RK |
12035 | /* If the register is being set at TEM, see if that is all |
12036 | TEM is doing. If so, delete TEM. Otherwise, make this | |
de7c79cc EC |
12037 | into a REG_UNUSED note instead. Don't delete sets to |
12038 | global register vars. */ | |
2cd54c2a ZW |
12039 | if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER |
12040 | || !global_regs[REGNO (XEXP (note, 0))]) | |
12041 | && reg_set_p (XEXP (note, 0), PATTERN (tem))) | |
38d8473f RK |
12042 | { |
12043 | rtx set = single_set (tem); | |
e5e809f4 | 12044 | rtx inner_dest = 0; |
e51712db | 12045 | #ifdef HAVE_cc0 |
f5c97640 | 12046 | rtx cc0_setter = NULL_RTX; |
e51712db | 12047 | #endif |
e5e809f4 JL |
12048 | |
12049 | if (set != 0) | |
12050 | for (inner_dest = SET_DEST (set); | |
663522cb KH |
12051 | (GET_CODE (inner_dest) == STRICT_LOW_PART |
12052 | || GET_CODE (inner_dest) == SUBREG | |
12053 | || GET_CODE (inner_dest) == ZERO_EXTRACT); | |
e5e809f4 JL |
12054 | inner_dest = XEXP (inner_dest, 0)) |
12055 | ; | |
38d8473f RK |
12056 | |
12057 | /* Verify that it was the set, and not a clobber that | |
663522cb | 12058 | modified the register. |
f5c97640 RH |
12059 | |
12060 | CC0 targets must be careful to maintain setter/user | |
12061 | pairs. If we cannot delete the setter due to side | |
12062 | effects, mark the user with an UNUSED note instead | |
12063 | of deleting it. */ | |
38d8473f RK |
12064 | |
12065 | if (set != 0 && ! side_effects_p (SET_SRC (set)) | |
f5c97640 RH |
12066 | && rtx_equal_p (XEXP (note, 0), inner_dest) |
12067 | #ifdef HAVE_cc0 | |
12068 | && (! reg_mentioned_p (cc0_rtx, SET_SRC (set)) | |
12069 | || ((cc0_setter = prev_cc0_setter (tem)) != NULL | |
12070 | && sets_cc0_p (PATTERN (cc0_setter)) > 0)) | |
12071 | #endif | |
12072 | ) | |
38d8473f RK |
12073 | { |
12074 | /* Move the notes and links of TEM elsewhere. | |
663522cb | 12075 | This might delete other dead insns recursively. |
38d8473f RK |
12076 | First set the pattern to something that won't use |
12077 | any register. */ | |
f9af39d0 | 12078 | rtx old_notes = REG_NOTES (tem); |
38d8473f RK |
12079 | |
12080 | PATTERN (tem) = pc_rtx; | |
f9af39d0 | 12081 | REG_NOTES (tem) = NULL; |
38d8473f | 12082 | |
f9af39d0 | 12083 | distribute_notes (old_notes, tem, tem, NULL_RTX); |
38d8473f RK |
12084 | distribute_links (LOG_LINKS (tem)); |
12085 | ||
6773e15f | 12086 | SET_INSN_DELETED (tem); |
f5c97640 RH |
12087 | |
12088 | #ifdef HAVE_cc0 | |
12089 | /* Delete the setter too. */ | |
12090 | if (cc0_setter) | |
12091 | { | |
12092 | PATTERN (cc0_setter) = pc_rtx; | |
f9af39d0 RE |
12093 | old_notes = REG_NOTES (cc0_setter); |
12094 | REG_NOTES (cc0_setter) = NULL; | |
f5c97640 | 12095 | |
f9af39d0 RE |
12096 | distribute_notes (old_notes, cc0_setter, |
12097 | cc0_setter, NULL_RTX); | |
f5c97640 RH |
12098 | distribute_links (LOG_LINKS (cc0_setter)); |
12099 | ||
6773e15f | 12100 | SET_INSN_DELETED (cc0_setter); |
f5c97640 RH |
12101 | } |
12102 | #endif | |
38d8473f RK |
12103 | } |
12104 | else | |
12105 | { | |
12106 | PUT_REG_NOTE_KIND (note, REG_UNUSED); | |
663522cb | 12107 | |
38d8473f | 12108 | /* If there isn't already a REG_UNUSED note, put one |
b30e1617 DJ |
12109 | here. Do not place a REG_DEAD note, even if |
12110 | the register is also used here; that would not | |
12111 | match the algorithm used in lifetime analysis | |
12112 | and can cause the consistency check in the | |
12113 | scheduler to fail. */ | |
38d8473f RK |
12114 | if (! find_regno_note (tem, REG_UNUSED, |
12115 | REGNO (XEXP (note, 0)))) | |
12116 | place = tem; | |
12117 | break; | |
d3a923ee RH |
12118 | } |
12119 | } | |
12120 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)) | |
4b4bf941 | 12121 | || (CALL_P (tem) |
d3a923ee RH |
12122 | && find_reg_fusage (tem, USE, XEXP (note, 0)))) |
12123 | { | |
12124 | place = tem; | |
12125 | ||
12126 | /* If we are doing a 3->2 combination, and we have a | |
12127 | register which formerly died in i3 and was not used | |
12128 | by i2, which now no longer dies in i3 and is used in | |
12129 | i2 but does not die in i2, and place is between i2 | |
12130 | and i3, then we may need to move a link from place to | |
12131 | i2. */ | |
12132 | if (i2 && INSN_UID (place) <= max_uid_cuid | |
12133 | && INSN_CUID (place) > INSN_CUID (i2) | |
663522cb KH |
12134 | && from_insn |
12135 | && INSN_CUID (from_insn) > INSN_CUID (i2) | |
d3a923ee RH |
12136 | && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) |
12137 | { | |
12138 | rtx links = LOG_LINKS (place); | |
12139 | LOG_LINKS (place) = 0; | |
12140 | distribute_links (links); | |
12141 | } | |
12142 | break; | |
12143 | } | |
12144 | ||
a813c111 | 12145 | if (tem == BB_HEAD (bb)) |
230d793d | 12146 | break; |
38d8473f | 12147 | } |
663522cb | 12148 | |
d3a923ee RH |
12149 | /* We haven't found an insn for the death note and it |
12150 | is still a REG_DEAD note, but we have hit the beginning | |
12151 | of the block. If the existing life info says the reg | |
715e7fbc | 12152 | was dead, there's nothing left to do. Otherwise, we'll |
e7139885 RH |
12153 | need to do a global life update after combine. */ |
12154 | if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 | |
12155 | && REGNO_REG_SET_P (bb->global_live_at_start, | |
12156 | REGNO (XEXP (note, 0)))) | |
4977bab6 | 12157 | SET_BIT (refresh_blocks, this_basic_block->index); |
38d8473f | 12158 | } |
230d793d RS |
12159 | |
12160 | /* If the register is set or already dead at PLACE, we needn't do | |
e5e809f4 | 12161 | anything with this note if it is still a REG_DEAD note. |
e8679703 | 12162 | We check here if it is set at all, not if is it totally replaced, |
e5e809f4 JL |
12163 | which is what `dead_or_set_p' checks, so also check for it being |
12164 | set partially. */ | |
12165 | ||
230d793d RS |
12166 | if (place && REG_NOTE_KIND (note) == REG_DEAD) |
12167 | { | |
770ae6cc | 12168 | unsigned int regno = REGNO (XEXP (note, 0)); |
230d793d | 12169 | |
e7139885 RH |
12170 | /* Similarly, if the instruction on which we want to place |
12171 | the note is a noop, we'll need do a global live update | |
12172 | after we remove them in delete_noop_moves. */ | |
12173 | if (noop_move_p (place)) | |
4977bab6 | 12174 | SET_BIT (refresh_blocks, this_basic_block->index); |
e7139885 | 12175 | |
230d793d RS |
12176 | if (dead_or_set_p (place, XEXP (note, 0)) |
12177 | || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place))) | |
12178 | { | |
12179 | /* Unless the register previously died in PLACE, clear | |
5eaad481 | 12180 | last_death. [I no longer understand why this is |
230d793d | 12181 | being done.] */ |
5eaad481 PB |
12182 | if (reg_stat[regno].last_death != place) |
12183 | reg_stat[regno].last_death = 0; | |
230d793d RS |
12184 | place = 0; |
12185 | } | |
12186 | else | |
5eaad481 | 12187 | reg_stat[regno].last_death = place; |
230d793d RS |
12188 | |
12189 | /* If this is a death note for a hard reg that is occupying | |
12190 | multiple registers, ensure that we are still using all | |
12191 | parts of the object. If we find a piece of the object | |
03afaf36 R |
12192 | that is unused, we must arrange for an appropriate REG_DEAD |
12193 | note to be added for it. However, we can't just emit a USE | |
12194 | and tag the note to it, since the register might actually | |
12195 | be dead; so we recourse, and the recursive call then finds | |
12196 | the previous insn that used this register. */ | |
230d793d RS |
12197 | |
12198 | if (place && regno < FIRST_PSEUDO_REGISTER | |
66fd46b6 | 12199 | && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1) |
230d793d | 12200 | { |
770ae6cc | 12201 | unsigned int endregno |
66fd46b6 JH |
12202 | = regno + hard_regno_nregs[regno] |
12203 | [GET_MODE (XEXP (note, 0))]; | |
230d793d | 12204 | int all_used = 1; |
770ae6cc | 12205 | unsigned int i; |
230d793d RS |
12206 | |
12207 | for (i = regno; i < endregno; i++) | |
03afaf36 R |
12208 | if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0) |
12209 | && ! find_regno_fusage (place, USE, i)) | |
12210 | || dead_or_set_regno_p (place, i)) | |
12211 | all_used = 0; | |
a394b17b | 12212 | |
230d793d RS |
12213 | if (! all_used) |
12214 | { | |
12215 | /* Put only REG_DEAD notes for pieces that are | |
03afaf36 | 12216 | not already dead or set. */ |
230d793d | 12217 | |
03afaf36 | 12218 | for (i = regno; i < endregno; |
66fd46b6 | 12219 | i += hard_regno_nregs[i][reg_raw_mode[i]]) |
230d793d | 12220 | { |
e50126e8 | 12221 | rtx piece = regno_reg_rtx[i]; |
f6366fc7 | 12222 | basic_block bb = this_basic_block; |
230d793d | 12223 | |
03afaf36 | 12224 | if (! dead_or_set_p (place, piece) |
230d793d RS |
12225 | && ! reg_bitfield_target_p (piece, |
12226 | PATTERN (place))) | |
03afaf36 R |
12227 | { |
12228 | rtx new_note | |
12229 | = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX); | |
12230 | ||
12231 | distribute_notes (new_note, place, place, | |
72531479 | 12232 | NULL_RTX); |
03afaf36 | 12233 | } |
c762163e R |
12234 | else if (! refers_to_regno_p (i, i + 1, |
12235 | PATTERN (place), 0) | |
12236 | && ! find_regno_fusage (place, USE, i)) | |
12237 | for (tem = PREV_INSN (place); ; | |
12238 | tem = PREV_INSN (tem)) | |
12239 | { | |
12240 | if (! INSN_P (tem)) | |
12241 | { | |
a813c111 | 12242 | if (tem == BB_HEAD (bb)) |
c762163e R |
12243 | { |
12244 | SET_BIT (refresh_blocks, | |
f6366fc7 | 12245 | this_basic_block->index); |
c762163e R |
12246 | break; |
12247 | } | |
12248 | continue; | |
12249 | } | |
12250 | if (dead_or_set_p (tem, piece) | |
12251 | || reg_bitfield_target_p (piece, | |
12252 | PATTERN (tem))) | |
12253 | { | |
12254 | REG_NOTES (tem) | |
71fd5a51 | 12255 | = gen_rtx_EXPR_LIST (REG_UNUSED, piece, |
c762163e R |
12256 | REG_NOTES (tem)); |
12257 | break; | |
12258 | } | |
12259 | } | |
12260 | ||
230d793d RS |
12261 | } |
12262 | ||
12263 | place = 0; | |
12264 | } | |
12265 | } | |
12266 | } | |
12267 | break; | |
12268 | ||
12269 | default: | |
12270 | /* Any other notes should not be present at this point in the | |
12271 | compilation. */ | |
341c100f | 12272 | gcc_unreachable (); |
230d793d RS |
12273 | } |
12274 | ||
12275 | if (place) | |
12276 | { | |
12277 | XEXP (note, 1) = REG_NOTES (place); | |
12278 | REG_NOTES (place) = note; | |
12279 | } | |
1a26b032 RK |
12280 | else if ((REG_NOTE_KIND (note) == REG_DEAD |
12281 | || REG_NOTE_KIND (note) == REG_UNUSED) | |
f8cfc6aa | 12282 | && REG_P (XEXP (note, 0))) |
b1f21e0a | 12283 | REG_N_DEATHS (REGNO (XEXP (note, 0)))--; |
230d793d RS |
12284 | |
12285 | if (place2) | |
1a26b032 RK |
12286 | { |
12287 | if ((REG_NOTE_KIND (note) == REG_DEAD | |
12288 | || REG_NOTE_KIND (note) == REG_UNUSED) | |
f8cfc6aa | 12289 | && REG_P (XEXP (note, 0))) |
b1f21e0a | 12290 | REG_N_DEATHS (REGNO (XEXP (note, 0)))++; |
1a26b032 | 12291 | |
38a448ca RH |
12292 | REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note), |
12293 | REG_NOTE_KIND (note), | |
12294 | XEXP (note, 0), | |
12295 | REG_NOTES (place2)); | |
1a26b032 | 12296 | } |
230d793d RS |
12297 | } |
12298 | } | |
12299 | \f | |
12300 | /* Similarly to above, distribute the LOG_LINKS that used to be present on | |
8c03ca00 EB |
12301 | I3, I2, and I1 to new locations. This is also called to add a link |
12302 | pointing at I3 when I3's destination is changed. */ | |
230d793d RS |
12303 | |
12304 | static void | |
79a490a9 | 12305 | distribute_links (rtx links) |
230d793d RS |
12306 | { |
12307 | rtx link, next_link; | |
12308 | ||
12309 | for (link = links; link; link = next_link) | |
12310 | { | |
12311 | rtx place = 0; | |
12312 | rtx insn; | |
12313 | rtx set, reg; | |
12314 | ||
12315 | next_link = XEXP (link, 1); | |
12316 | ||
12317 | /* If the insn that this link points to is a NOTE or isn't a single | |
12318 | set, ignore it. In the latter case, it isn't clear what we | |
663522cb | 12319 | can do other than ignore the link, since we can't tell which |
230d793d RS |
12320 | register it was for. Such links wouldn't be used by combine |
12321 | anyway. | |
12322 | ||
12323 | It is not possible for the destination of the target of the link to | |
12324 | have been changed by combine. The only potential of this is if we | |
12325 | replace I3, I2, and I1 by I3 and I2. But in that case the | |
12326 | destination of I2 also remains unchanged. */ | |
12327 | ||
4b4bf941 | 12328 | if (NOTE_P (XEXP (link, 0)) |
230d793d RS |
12329 | || (set = single_set (XEXP (link, 0))) == 0) |
12330 | continue; | |
12331 | ||
12332 | reg = SET_DEST (set); | |
12333 | while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT | |
230d793d RS |
12334 | || GET_CODE (reg) == STRICT_LOW_PART) |
12335 | reg = XEXP (reg, 0); | |
12336 | ||
12337 | /* A LOG_LINK is defined as being placed on the first insn that uses | |
12338 | a register and points to the insn that sets the register. Start | |
12339 | searching at the next insn after the target of the link and stop | |
12340 | when we reach a set of the register or the end of the basic block. | |
12341 | ||
12342 | Note that this correctly handles the link that used to point from | |
5089e22e | 12343 | I3 to I2. Also note that not much searching is typically done here |
230d793d RS |
12344 | since most links don't point very far away. */ |
12345 | ||
12346 | for (insn = NEXT_INSN (XEXP (link, 0)); | |
f6366fc7 | 12347 | (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR |
a813c111 | 12348 | || BB_HEAD (this_basic_block->next_bb) != insn)); |
230d793d | 12349 | insn = NEXT_INSN (insn)) |
2c3c49de | 12350 | if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn))) |
230d793d RS |
12351 | { |
12352 | if (reg_referenced_p (reg, PATTERN (insn))) | |
12353 | place = insn; | |
12354 | break; | |
12355 | } | |
4b4bf941 | 12356 | else if (CALL_P (insn) |
663522cb | 12357 | && find_reg_fusage (insn, USE, reg)) |
6e2d1486 RK |
12358 | { |
12359 | place = insn; | |
12360 | break; | |
12361 | } | |
892c9f1f RK |
12362 | else if (INSN_P (insn) && reg_set_p (reg, insn)) |
12363 | break; | |
230d793d RS |
12364 | |
12365 | /* If we found a place to put the link, place it there unless there | |
12366 | is already a link to the same insn as LINK at that point. */ | |
12367 | ||
12368 | if (place) | |
12369 | { | |
12370 | rtx link2; | |
12371 | ||
12372 | for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1)) | |
12373 | if (XEXP (link2, 0) == XEXP (link, 0)) | |
12374 | break; | |
12375 | ||
12376 | if (link2 == 0) | |
12377 | { | |
12378 | XEXP (link, 1) = LOG_LINKS (place); | |
12379 | LOG_LINKS (place) = link; | |
abe6e52f RK |
12380 | |
12381 | /* Set added_links_insn to the earliest insn we added a | |
12382 | link to. */ | |
663522cb | 12383 | if (added_links_insn == 0 |
abe6e52f RK |
12384 | || INSN_CUID (added_links_insn) > INSN_CUID (place)) |
12385 | added_links_insn = place; | |
230d793d RS |
12386 | } |
12387 | } | |
12388 | } | |
12389 | } | |
12390 | \f | |
67962db5 RS |
12391 | /* Subroutine of unmentioned_reg_p and callback from for_each_rtx. |
12392 | Check whether the expression pointer to by LOC is a register or | |
12393 | memory, and if so return 1 if it isn't mentioned in the rtx EXPR. | |
12394 | Otherwise return zero. */ | |
12395 | ||
12396 | static int | |
12397 | unmentioned_reg_p_1 (rtx *loc, void *expr) | |
12398 | { | |
12399 | rtx x = *loc; | |
12400 | ||
12401 | if (x != NULL_RTX | |
3c0cb5de | 12402 | && (REG_P (x) || MEM_P (x)) |
67962db5 RS |
12403 | && ! reg_mentioned_p (x, (rtx) expr)) |
12404 | return 1; | |
12405 | return 0; | |
12406 | } | |
12407 | ||
12408 | /* Check for any register or memory mentioned in EQUIV that is not | |
12409 | mentioned in EXPR. This is used to restrict EQUIV to "specializations" | |
12410 | of EXPR where some registers may have been replaced by constants. */ | |
12411 | ||
12412 | static bool | |
12413 | unmentioned_reg_p (rtx equiv, rtx expr) | |
12414 | { | |
12415 | return for_each_rtx (&equiv, unmentioned_reg_p_1, expr); | |
12416 | } | |
12417 | \f | |
1427d6d2 RK |
12418 | /* Compute INSN_CUID for INSN, which is an insn made by combine. */ |
12419 | ||
12420 | static int | |
79a490a9 | 12421 | insn_cuid (rtx insn) |
1427d6d2 RK |
12422 | { |
12423 | while (insn != 0 && INSN_UID (insn) > max_uid_cuid | |
4b4bf941 | 12424 | && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE) |
1427d6d2 RK |
12425 | insn = NEXT_INSN (insn); |
12426 | ||
341c100f | 12427 | gcc_assert (INSN_UID (insn) <= max_uid_cuid); |
1427d6d2 RK |
12428 | |
12429 | return INSN_CUID (insn); | |
12430 | } | |
12431 | \f | |
230d793d | 12432 | void |
79a490a9 | 12433 | dump_combine_stats (FILE *file) |
230d793d | 12434 | { |
ab87f8c8 | 12435 | fnotice |
230d793d RS |
12436 | (file, |
12437 | ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n", | |
12438 | combine_attempts, combine_merges, combine_extras, combine_successes); | |
12439 | } | |
12440 | ||
12441 | void | |
79a490a9 | 12442 | dump_combine_total_stats (FILE *file) |
230d793d | 12443 | { |
ab87f8c8 | 12444 | fnotice |
230d793d RS |
12445 | (file, |
12446 | "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n", | |
12447 | total_attempts, total_merges, total_extras, total_successes); | |
12448 | } |