/* Store motion via Lazy Code Motion on the reverse CFG.
   Copyright (C) 1997-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "intl.h"
#include "tree-pass.h"
#include "hash-table.h"
#include "df.h"
#include "dbgcnt.h"
#include "rtl-iter.h"

/* This pass implements downward store motion.
   As of May 1, 2009, the pass is not enabled by default on any target,
   but bootstrap completes on ia64 and x86_64 with the pass enabled.  */

/* TODO:
   - remove_reachable_equiv_notes is an incomprehensible pile of goo and
     a compile time hog that needs a rewrite (maybe cache st_exprs to
     invalidate REG_EQUAL/REG_EQUIV notes for?).
   - pattern_regs in st_expr should be a regset (on its own obstack).
   - antic_stores and avail_stores should be VECs instead of lists.
   - store_motion_mems should be a vec instead of a list.
   - there should be an alloc pool for struct st_expr objects.
   - investigate whether it is helpful to make the address of an st_expr
     a cselib VALUE.
   - when GIMPLE alias information is exported, the effectiveness of this
     pass should be re-evaluated.
*/
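
/* Illustrative example (added for exposition; not in the original
   sources).  The kind of transformation this pass performs, in C-like
   pseudocode, with a hypothetical pointer P and temporary T:

	if (cond)			if (cond)
	  *p = a;			  t = a;
	else			==>	else
	  *p = b;			  t = b;
	... (no uses of *p) ...		...
	return;				*p = t;
					return;

   Both stores are locally available in their blocks and the code below
   them is transparent for the expression, so LCM on the reverse CFG
   deletes the two stores and inserts a single one nearer the exit; the
   stored value travels in the `reaching_reg' T.  */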

/* This is a list of store expressions (MEMs).  The structure is used
   as an expression table to track stores which look interesting, and
   might be moveable towards the exit block.  */

struct st_expr
{
  /* Pattern of this mem.  */
  rtx pattern;
  /* List of registers mentioned by the mem.  */
  rtx pattern_regs;
  /* INSN list of stores that are locally anticipatable.  */
  rtx_insn_list *antic_stores;
  /* INSN list of stores that are locally available.  */
  rtx_insn_list *avail_stores;
  /* Next in the list.  */
  struct st_expr * next;
  /* Store ID in the dataflow bitmaps.  */
  int index;
  /* Hash value for the hash table.  */
  unsigned int hash_index;
  /* Register holding the stored expression when a store is moved.
     This field is also used as a cache in find_moveable_store, see
     LAST_AVAIL_CHECK_FAILURE below.  */
  rtx reaching_reg;
};
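
/* An illustrative picture (added for exposition; not in the original
   sources): for a candidate store such as

	(set (mem:SI (reg/f:SI 100)) (reg:SI 101))

   the table entry would roughly contain

	pattern      = (mem:SI (reg/f:SI 100))
	pattern_regs = EXPR_LIST holding (reg/f:SI 100)
	antic_stores = INSN_LIST of per-block first anticipatable stores
	avail_stores = INSN_LIST of per-block last available stores

   and the traversal idiom used throughout this file is

	for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
	  ...  */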

/* Head of the list of load/store memory refs.  */
static struct st_expr * store_motion_mems = NULL;

/* These bitmaps will hold the local dataflow properties per basic block.  */
static sbitmap *st_kill, *st_avloc, *st_antloc, *st_transp;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *st_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *st_delete_map;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Contains the edge_list returned by pre_edge_rev_lcm.  */
static struct edge_list *edge_list;

/* Hashtable helpers.  */

struct st_expr_hasher : typed_noop_remove <st_expr>
{
  typedef st_expr value_type;
  typedef st_expr compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

inline hashval_t
st_expr_hasher::hash (const value_type *x)
{
  int do_not_record_p = 0;
  return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
}

inline bool
st_expr_hasher::equal (const value_type *ptr1, const compare_type *ptr2)
{
  return exp_equiv_p (ptr1->pattern, ptr2->pattern, 0, true);
}

/* Hashtable for the load/store memory refs.  */
static hash_table<st_expr_hasher> *store_motion_mems_table;

/* This will search the st_expr list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct st_expr *
st_expr_entry (rtx x)
{
  int do_not_record_p = 0;
  struct st_expr * ptr;
  unsigned int hash;
  st_expr **slot;
  struct st_expr e;

  hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
		   NULL, /*have_reg_qty=*/false);

  e.pattern = x;
  slot = store_motion_mems_table->find_slot_with_hash (&e, hash, INSERT);
  if (*slot)
    return *slot;

  ptr = XNEW (struct st_expr);

  ptr->next         = store_motion_mems;
  ptr->pattern      = x;
  ptr->pattern_regs = NULL_RTX;
  ptr->antic_stores = NULL;
  ptr->avail_stores = NULL;
  ptr->reaching_reg = NULL_RTX;
  ptr->index        = 0;
  ptr->hash_index   = hash;
  store_motion_mems = ptr;
  *slot = ptr;

  return ptr;
}

/* Free up an individual st_expr entry.  */

static void
free_st_expr_entry (struct st_expr * ptr)
{
  free_INSN_LIST_list (& ptr->antic_stores);
  free_INSN_LIST_list (& ptr->avail_stores);

  free (ptr);
}

/* Free up all memory associated with the st_expr list.  */

static void
free_store_motion_mems (void)
{
  delete store_motion_mems_table;
  store_motion_mems_table = NULL;

  while (store_motion_mems)
    {
      struct st_expr * tmp = store_motion_mems;
      store_motion_mems = store_motion_mems->next;
      free_st_expr_entry (tmp);
    }
  store_motion_mems = NULL;
}

/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_store_motion_mems (void)
{
  struct st_expr * ptr;
  int n = 0;

  for (ptr = store_motion_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}

/* Return first item in the list.  */

static inline struct st_expr *
first_st_expr (void)
{
  return store_motion_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct st_expr *
next_st_expr (struct st_expr * ptr)
{
  return ptr->next;
}

/* Dump debugging info about the store_motion_mems list.  */

static void
print_store_motion_mems (FILE * file)
{
  struct st_expr * ptr;

  fprintf (file, "STORE_MOTION list of MEM exprs considered:\n");

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n	 ANTIC stores : ");

      if (ptr->antic_stores)
	print_rtl (file, ptr->antic_stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n	 AVAIL stores : ");

      if (ptr->avail_stores)
	print_rtl (file, ptr->avail_stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}

/* Return false if any of the registers in list X are killed, i.e. are
   set in REGS_SET; return true otherwise.  */

static bool
store_ops_ok (const_rtx x, int *regs_set)
{
  const_rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
	return false;
    }

  return true;
}

/* Returns a list of registers mentioned in X.
   FIXME: A regset would be prettier and less expensive.  */

static rtx
extract_mentioned_regs (rtx x)
{
  rtx mentioned_regs = NULL;
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (REG_P (x))
	mentioned_regs = alloc_EXPR_LIST (0, x, mentioned_regs);
    }
  return mentioned_regs;
}

/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after X.  */

static bool
load_kills_store (const_rtx x, const_rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x);
}
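
/* An illustration of the two cases above (added for exposition; not in
   the original sources), for a store `*p = v' and a possibly aliasing
   load `r = *q':

	AFTER != 0			AFTER == 0

	r = *q;	   <- load X		*p = v;	   <- STORE_PATTERN
	...				...
	*p = v;	   <- STORE_PATTERN	r = *q;	   <- load X

   In the first case sinking the store past the load would create a
   write-after-read hazard, hence the anti_dependence check; in the
   second the load may read the stored value, hence true_dependence.  */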

/* Go through the entire rtx X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (const_rtx x, const_rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  bool ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (MEM_P (x))
    {
      if (load_kills_store (x, store_pattern, after))
	return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}

/* Go through pattern PAT looking for any loads which might kill the
   store in X.  Return true if found.
   AFTER is true if we are checking the case when the loads that might
   kill X occur after the insn for PAT.  */

static inline bool
store_killed_in_pat (const_rtx x, const_rtx pat, int after)
{
  if (GET_CODE (pat) == SET)
    {
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == ZERO_EXTRACT)
	dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (MEM_P (dest)
	  && !exp_equiv_p (dest, x, 0, true))
	{
	  if (after)
	    {
	      if (output_dependence (dest, x))
		return true;
	    }
	  else
	    {
	      if (output_dependence (x, dest))
		return true;
	    }
	}
    }

  if (find_loads (pat, x, after))
    return true;

  return false;
}

/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (const_rtx x, const_rtx x_regs, const rtx_insn *insn, int after)
{
  const_rtx reg, note, pat;

  if (! NONDEBUG_INSN_P (insn))
    return false;

  if (CALL_P (insn))
    {
      /* A normal or pure call might read from the pattern,
	 but a const call will not.  */
      if (!RTL_CONST_CALL_P (insn))
	return true;

      /* But even a const call reads its parameters.  Check whether the
	 base of some of the registers used in the mem is the stack
	 pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
	if (may_be_sp_based_p (XEXP (reg, 0)))
	  return true;

      return false;
    }

  pat = PATTERN (insn);
  if (GET_CODE (pat) == SET)
    {
      if (store_killed_in_pat (x, pat, after))
	return true;
    }
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
	  return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (exp_equiv_p (note, x, 0, true))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}

/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, return in FAIL_INSN the last insn in which the kill
   occurs.  */

static bool
store_killed_after (const_rtx x, const_rtx x_regs, const rtx_insn *insn,
		    const_basic_block bb,
		    int *regs_set_after, rtx *fail_insn)
{
  rtx_insn *last = BB_END (bb), *act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
	*fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
	if (fail_insn)
	  *fail_insn = act;
	return true;
      }

  return false;
}

/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is list of registers mentioned in X.
   REGS_SET_BEFORE is bitmap of registers set before or in this insn.  */

static bool
store_killed_before (const_rtx x, const_rtx x_regs, const rtx_insn *insn,
		     const_basic_block bb, int *regs_set_before)
{
  rtx_insn *first = BB_HEAD (bb);

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}

/* The last insn in the basic block that compute_store_table is processing,
   where store_killed_after is true for X.
   Since we go through the basic block from BB_END to BB_HEAD, this is
   also the available store at the end of the basic block.  Therefore
   this is in effect a cache, to avoid calling store_killed_after for
   equivalent aliasing store expressions.
   This value is only meaningful during the computation of the store
   table.  We hi-jack the REACHING_REG field of struct st_expr to save
   a bit of memory.  */
#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)

/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB
   from head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORES
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORES;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn due to which it fails to
      be available is stored in REACHING_REG (via LAST_AVAIL_CHECK_FAILURE).

   Things are complicated a bit by the fact that there may already be stores
   to the same MEM from other blocks; also, the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */
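
/* For illustration only (not in the original sources): suppose blocks
   bb0, bb1 and bb2 each contain a store to the same MEM, but the store
   in bb1 is preceded in bb1 by an aliasing access.  After scanning, the
   entry holds

	antic_stores = (insn-in-bb2, NULL_RTX, insn-in-bb0)
	avail_stores = (insn-in-bb2, insn-in-bb1, insn-in-bb0)

   newest block first, with NULL_RTX recording "no anticipatable store
   in that block".  */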

static void
find_moveable_store (rtx_insn *insn, int *regs_set_before, int *regs_set_after)
{
  struct st_expr * ptr;
  rtx dest, set;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (cfun->can_throw_non_call_exceptions && may_trap_p (dest))
    return;

  /* Even if the destination cannot trap, the source may.  In this case we'd
     need to handle updating the REG_EH_REGION note.  */
  if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
    return;

  /* Make sure that the SET_SRC of this store insn can be assigned to
     a register, or we will fail later on in replace_store_insn, which
     assumes that we can do this.  But sometimes the target machine has
     oddities like MEM read-modify-write instructions.  See for example
     PR24257.  */
  if (!can_assign_to_reg_without_clobbers_p (SET_SRC (set)))
    return;

  ptr = st_expr_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ptr->antic_stores)
    check_anticipatable = 1;
  else
    {
      rtx_insn *tmp = ptr->antic_stores->insn ();
      if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
	check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      rtx_insn *tmp;
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
	tmp = NULL;
      else
	tmp = insn;
      ptr->antic_stores = alloc_INSN_LIST (tmp, ptr->antic_stores);
    }

  /* It is not necessary to check whether the store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!ptr->avail_stores)
    check_available = 1;
  else
    {
      rtx_insn *tmp = ptr->avail_stores->insn ();
      if (BLOCK_FOR_INSN (tmp) != bb)
	check_available = 1;
    }
  if (check_available)
    {
      /* Check that we have already reached the insn at which the check
	 failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
	{
	  rtx_insn *tmp;
	  for (tmp = BB_END (bb);
	       tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
	       tmp = PREV_INSN (tmp))
	    continue;
	  if (tmp == insn)
	    check_available = 0;
	}
      else
	check_available = store_killed_after (dest, ptr->pattern_regs, insn,
					      bb, regs_set_after,
					      &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    ptr->avail_stores = alloc_INSN_LIST (insn, ptr->avail_stores);
}

/* Find available and anticipatable stores.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
#ifdef ENABLE_CHECKING
  unsigned regno;
#endif
  rtx_insn *insn;
  rtx_insn *tmp;
  df_ref def;
  int *last_set_in, *already_set;
  struct st_expr * ptr, **prev_next_ptr_ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  store_motion_mems = NULL;
  store_motion_mems_table = new hash_table<st_expr_hasher> (13);
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* First compute the registers set in this block.  */
      FOR_BB_INSNS (bb, insn)
	{
	  if (! NONDEBUG_INSN_P (insn))
	    continue;

	  FOR_EACH_INSN_DEF (def, insn)
	    last_set_in[DF_REF_REGNO (def)] = INSN_UID (insn);
	}

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      FOR_BB_INSNS (bb, insn)
	{
	  if (! NONDEBUG_INSN_P (insn))
	    continue;

	  FOR_EACH_INSN_DEF (def, insn)
	    already_set[DF_REF_REGNO (def)] = INSN_UID (insn);

	  /* Now that we've marked regs, look for stores.  */
	  find_moveable_store (insn, already_set, last_set_in);

	  /* Unmark regs that are no longer set.  */
	  FOR_EACH_INSN_DEF (def, insn)
	    if (last_set_in[DF_REF_REGNO (def)] == INSN_UID (insn))
	      last_set_in[DF_REF_REGNO (def)] = 0;
	}

#ifdef ENABLE_CHECKING
      /* last_set_in should now be all-zero.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
	gcc_assert (!last_set_in[regno]);
#endif

      /* Clear temporary marks.  */
      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
	{
	  LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
	  if (ptr->antic_stores
	      && (tmp = ptr->antic_stores->insn ()) == NULL_RTX)
	    ptr->antic_stores = ptr->antic_stores->next ();
	}
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = store_motion_mems, prev_next_ptr_ptr = &store_motion_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (! ptr->avail_stores)
	{
	  *prev_next_ptr_ptr = ptr->next;
	  store_motion_mems_table->remove_elt_with_hash (ptr, ptr->hash_index);
	  free_st_expr_entry (ptr);
	}
      else
	prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_store_motion_mems ();

  if (dump_file)
    print_store_motion_mems (dump_file);

  free (last_set_in);
  free (already_set);
  return ret;
}
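
/* A note on the scan above (added for exposition; not in the original
   sources).  For a block containing

	r1 = ...;	(uid 10)
	*p = r1;	(uid 11)   <- candidate store
	r2 = ...;	(uid 12)

   when find_moveable_store sees uid 11, already_set records r1 (set at
   or before uid 11), while last_set_in still records r2 (its last set,
   uid 12, has not been reached and unmarked yet).  This gives
   store_killed_before/store_killed_after cheap answers about register
   sets on either side of the candidate store.  */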

/* In all code following after this, REACHING_REG has its original
   meaning again.  Avoid confusion, and undef the accessor macro for
   the temporary marks usage in compute_store_table.  */
#undef LAST_AVAIL_CHECK_FAILURE

/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

static void
insert_insn_start_basic_block (rtx_insn *insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx_insn *prev = PREV_INSN (BB_HEAD (bb));
  rtx_insn *before = BB_HEAD (bb);
  while (before != 0)
    {
      if (! LABEL_P (before)
	  && !NOTE_INSN_BASIC_BLOCK_P (before))
	break;
      prev = before;
      if (prev == BB_END (bb))
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after_noloc (insn, prev, bb);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION  insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }
}

/* This routine will insert a store on an edge.  EXPR is the st_expr entry
   for the memory reference, and E is the edge to insert it on.  Returns
   nonzero if an edge insertion was performed.  */

static int
insert_store (struct st_expr * expr, edge e)
{
  rtx reg;
  rtx_insn *insn;
  basic_block bb;
  edge tmp;
  edge_iterator ei;

  /* We did all the deletions before this insertion, so if we didn't delete
     a store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = as_a <rtx_insn *> (gen_move_insn (copy_rtx (expr->pattern), reg));

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  FOR_EACH_EDGE (tmp, ei, e->dest->preds)
    if (!(tmp->flags & EDGE_FAKE))
      {
	int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

	gcc_assert (index != EDGE_INDEX_NO_EDGE);
	if (! bitmap_bit_p (st_insert_map[index], expr->index))
	  break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      FOR_EACH_EDGE (tmp, ei, e->dest->preds)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  bitmap_clear_bit (st_insert_map[index], expr->index);
	}
      insert_insn_start_basic_block (insn, bb);
      return 0;
    }

  /* We can't put stores in the front of blocks pointed to by abnormal
     edges since that may put a store where one didn't use to be.  */
  gcc_assert (!(e->flags & EDGE_ABNORMAL));

  insert_insn_on_edge (insn, e);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION  insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  return 1;
}

/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
{
  edge_iterator *stack, ei;
  int sp;
  edge act;
  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  rtx last, note;
  rtx_insn *insn;
  rtx mem = smexpr->pattern;

  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun));
  sp = 0;
  ei = ei_start (bb->succs);

  bitmap_clear (visited);

  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
  while (1)
    {
      if (!act)
	{
	  if (!sp)
	    {
	      free (stack);
	      sbitmap_free (visited);
	      return;
	    }
	  act = ei_edge (stack[--sp]);
	}
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bitmap_bit_p (visited, bb->index))
	{
	  if (!ei_end_p (ei))
	    ei_next (&ei);
	  act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
	  continue;
	}
      bitmap_set_bit (visited, bb->index);

      if (bitmap_bit_p (st_antloc[bb->index], smexpr->index))
	{
	  for (last = smexpr->antic_stores;
	       BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
	       last = XEXP (last, 1))
	    continue;
	  last = XEXP (last, 0);
	}
      else
	last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
	if (NONDEBUG_INSN_P (insn))
	  {
	    note = find_reg_equal_equiv_note (insn);
	    if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
	      continue;

	    if (dump_file)
	      fprintf (dump_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		       INSN_UID (insn));
	    remove_note (insn, note);
	  }

      if (!ei_end_p (ei))
	ei_next (&ei);
      act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;

      if (EDGE_COUNT (bb->succs) > 0)
	{
	  if (act)
	    stack[sp++] = ei;
	  ei = ei_start (bb->succs);
	  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
	}
    }
}

/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct st_expr *smexpr)
{
  rtx_insn *insn;
  rtx mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = as_a <rtx_insn *> (gen_move_insn (reg, SET_SRC (single_set (del))));

  for (ptr = smexpr->antic_stores; ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
	XEXP (ptr, 0) = insn;
	break;
      }

  /* Move the notes from the deleted insn to its replacement.  */
  REG_NOTES (insn) = REG_NOTES (del);

  /* Emit the insn AFTER all the notes are transferred.
     This is cheaper since we avoid df rescanning for the note change.  */
  insn = emit_insn_after (insn, del);

  if (dump_file)
    {
      fprintf (dump_file,
	       "STORE_MOTION  delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (dump_file, del, 6);
      fprintf (dump_file, "\nSTORE_MOTION  replaced with insn:\n      ");
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate provided that they are reached by this
     definition, so drop them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      {
	set = single_set (insn);
	if (!set)
	  continue;
	if (exp_equiv_p (SET_DEST (set), mem, 0, true))
	  return;
	note = find_reg_equal_equiv_note (insn);
	if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
	  continue;

	if (dump_file)
	  fprintf (dump_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		   INSN_UID (insn));
	remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}


/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct st_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);

  reg = expr->reaching_reg;

  for (i = expr->avail_stores; i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
	{
	  /* We know there is only one since we deleted redundant
	     ones during the available computation.  */
	  replace_store_insn (reg, del, bb, expr);
	  break;
	}
    }
}

/* Fill in the available, anticipatable, transparent and kill vectors
   (st_avloc, st_antloc, st_transp, st_kill), based on the lists of
   available and anticipatable stores.  */
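
/* A summary of what the vectors mean for the LCM problem (this comment
   is an addition for exposition; see lcm.c for the solver itself).  For
   a store expression E and a block B:

	st_antloc[B][E]	 E is anticipatable at the start of B: B stores
			 to E's MEM before anything in B kills it.
	st_avloc[B][E]	 E is available at the end of B: B stores to
			 E's MEM and nothing later in B kills it.
	st_kill[B][E]	 something in B kills E (an aliasing load or
			 store, or a set of a register in E's address).
	st_transp[B][E]	 B is transparent for E: nothing in B kills it.

   pre_edge_rev_lcm consumes these and produces st_insert_map (edges
   that need a copy of the store) and st_delete_map (blocks whose store
   becomes redundant).  */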
static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx_insn *insn;
  rtx_insn_list *st;
  struct st_expr * ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  st_avloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				   num_stores);
  bitmap_vector_clear (st_avloc, last_basic_block_for_fn (cfun));

  st_antloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				    num_stores);
  bitmap_vector_clear (st_antloc, last_basic_block_for_fn (cfun));

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      for (st = ptr->avail_stores; st != NULL; st = st->next ())
	{
	  insn = st->insn ();
	  bb = BLOCK_FOR_INSN (insn);

	  /* If we've already seen an available expression in this block,
	     we can delete this one (it occurs earlier in the block).  We'll
	     copy the SRC expression to an unused register in case there
	     are any side effects.  */
	  if (bitmap_bit_p (st_avloc[bb->index], ptr->index))
	    {
	      rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
	      if (dump_file)
		fprintf (dump_file, "Removing redundant store:\n");
	      replace_store_insn (r, XEXP (st, 0), bb, ptr);
	      continue;
	    }
	  bitmap_set_bit (st_avloc[bb->index], ptr->index);
	}

      for (st = ptr->antic_stores; st != NULL; st = st->next ())
	{
	  insn = st->insn ();
	  bb = BLOCK_FOR_INSN (insn);
	  bitmap_set_bit (st_antloc[bb->index], ptr->index);
	}
    }

  st_kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_stores);
  bitmap_vector_clear (st_kill, last_basic_block_for_fn (cfun));

  st_transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_stores);
  bitmap_vector_clear (st_transp, last_basic_block_for_fn (cfun));
  regs_set_in_block = XNEWVEC (int, max_gcse_regno);

  FOR_EACH_BB_FN (bb, cfun)
    {
      memset (regs_set_in_block, 0, sizeof (int) * max_gcse_regno);

      FOR_BB_INSNS (bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  {
	    df_ref def;
	    FOR_EACH_INSN_DEF (def, insn)
	      {
		unsigned int ref_regno = DF_REF_REGNO (def);
		if (ref_regno < max_gcse_regno)
		  regs_set_in_block[ref_regno] = 1;
	      }
	  }

      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
	{
	  if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
				  bb, regs_set_in_block, NULL))
	    {
	      /* It should not be necessary to consider the expression
		 killed if it is both anticipatable and available.  */
	      if (!bitmap_bit_p (st_antloc[bb->index], ptr->index)
		  || !bitmap_bit_p (st_avloc[bb->index], ptr->index))
		bitmap_set_bit (st_kill[bb->index], ptr->index);
	    }
	  else
	    bitmap_set_bit (st_transp[bb->index], ptr->index);
	}
    }

  free (regs_set_in_block);

  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_kill", "", st_kill,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_transp", "", st_transp,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
			  last_basic_block_for_fn (cfun));
    }
}

/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_store_motion_mems ();

  if (st_avloc)
    sbitmap_vector_free (st_avloc);
  if (st_kill)
    sbitmap_vector_free (st_kill);
  if (st_transp)
    sbitmap_vector_free (st_transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (st_insert_map)
    sbitmap_vector_free (st_insert_map);
  if (st_delete_map)
    sbitmap_vector_free (st_delete_map);

  st_avloc = st_kill = st_transp = st_antloc = NULL;
  st_insert_map = st_delete_map = NULL;
}

/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.
   Return non-zero if transformations are performed by the pass.  */

static int
one_store_motion_pass (void)
{
  basic_block bb;
  int x;
  struct st_expr * ptr;
  int did_edge_inserts = 0;
  int n_stores_deleted = 0;
  int n_stores_created = 0;

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      delete store_motion_mems_table;
      store_motion_mems_table = NULL;
      end_alias_analysis ();
      return 0;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (num_stores, st_transp, st_avloc,
				st_antloc, st_kill, &st_insert_map,
				&st_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      /* If any of the edges we have above are abnormal, we can't move this
	 store.  */
      for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
	if (bitmap_bit_p (st_insert_map[x], ptr->index)
	    && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
	  break;

      if (x >= 0)
	{
	  if (dump_file != NULL)
	    fprintf (dump_file,
		     "Can't replace store %d: abnormal edge from %d to %d\n",
		     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
		     INDEX_EDGE (edge_list, x)->dest->index);
	  continue;
	}

      /* Delete the stores that the insertions below make redundant.  */

      FOR_EACH_BB_FN (bb, cfun)
	if (bitmap_bit_p (st_delete_map[bb->index], ptr->index))
	  {
	    delete_store (ptr, bb);
	    n_stores_deleted++;
	  }

      for (x = 0; x < NUM_EDGES (edge_list); x++)
	if (bitmap_bit_p (st_insert_map[x], ptr->index))
	  {
	    did_edge_inserts |= insert_store (ptr, INDEX_EDGE (edge_list, x));
	    n_stores_created++;
	  }
    }

  if (did_edge_inserts)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_exit_edges ();
  end_alias_analysis ();

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION of %s, %d basic blocks, ",
	       current_function_name (), n_basic_blocks_for_fn (cfun));
      fprintf (dump_file, "%d insns deleted, %d insns created\n",
	       n_stores_deleted, n_stores_created);
    }

  return (n_stores_deleted > 0 || n_stores_created > 0);
}

static unsigned int
execute_rtl_store_motion (void)
{
  delete_unreachable_blocks ();
  df_analyze ();
  flag_rerun_cse_after_global_opts |= one_store_motion_pass ();
  return 0;
}

namespace {

const pass_data pass_data_rtl_store_motion =
{
  RTL_PASS, /* type */
  "store_motion", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_LSM, /* tv_id */
  PROP_cfglayout, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_store_motion : public rtl_opt_pass
{
public:
  pass_rtl_store_motion (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_store_motion, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_rtl_store_motion ();
    }

}; // class pass_rtl_store_motion

bool
pass_rtl_store_motion::gate (function *fun)
{
  return optimize > 0 && flag_gcse_sm
    && !fun->calls_setjmp
    && optimize_function_for_speed_p (fun)
    && dbg_cnt (store_motion);
}

} // anon namespace

rtl_opt_pass *
make_pass_rtl_store_motion (gcc::context *ctxt)
{
  return new pass_rtl_store_motion (ctxt);
}