/* Store motion via Lazy Code Motion on the reverse CFG.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "intl.h"
#include "timevar.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "df.h"
#include "dbgcnt.h"

/* This pass implements downward store motion.
   As of May 1, 2009, the pass is not enabled by default on any target,
   but bootstrap completes on ia64 and x86_64 with the pass enabled.  */

/* TODO:
   - remove_reachable_equiv_notes is an incomprehensible pile of goo and
     a compile time hog that needs a rewrite (maybe cache st_exprs to
     invalidate REG_EQUAL/REG_EQUIV notes for?).
   - pattern_regs in st_expr should be a regset (on its own obstack).
   - antic_stores and avail_stores should be VECs instead of lists.
   - store_motion_mems should be a VEC instead of a list.
   - there should be an alloc pool for struct st_expr objects.
   - investigate whether it is helpful to make the address of an st_expr
     a cselib VALUE.
   - when GIMPLE alias information is exported, the effectiveness of this
     pass should be re-evaluated.
*/

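/* An illustrative sketch of the transformation (hypothetical input,
   not compiler output): in a loop like

       while (--n)
         *p = n;

   the store to *p is available at the end of the loop body and
   anticipatable on every path to the exit, so LCM on the reverse CFG
   sinks it to

       while (--n)
         reaching_reg = n;
       *p = reaching_reg;

   executing the store once instead of once per iteration.  delete_store
   performs the replacement inside the loop, and insert_store emits the
   single store on the exit edge.  */
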
/* This is a list of store expressions (MEMs).  The structure is used
   as an expression table to track stores which look interesting, and
   might be moveable towards the exit block.  */

struct st_expr
{
  /* Pattern of this mem.  */
  rtx pattern;
  /* List of registers mentioned by the mem.  */
  rtx pattern_regs;
  /* INSN list of stores that are locally anticipatable.  */
  rtx antic_stores;
  /* INSN list of stores that are locally available.  */
  rtx avail_stores;
  /* Next in the list.  */
  struct st_expr * next;
  /* Store ID in the dataflow bitmaps.  */
  int index;
  /* Hash value for the hash table.  */
  unsigned int hash_index;
  /* Register holding the stored expression when a store is moved.
     This field is also used as a cache in find_moveable_store, see
     LAST_AVAIL_CHECK_FAILURE below.  */
  rtx reaching_reg;
};

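/* For instance, for a store like *(p + 4) = x, PATTERN would be the rtx
   (mem:SI (plus:SI (reg/f:SI 60) (const_int 4))) and PATTERN_REGS the
   single-element EXPR_LIST holding (reg/f:SI 60).  (Illustrative RTL;
   the register numbers and modes are made up.)  */
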
/* Head of the list of load/store memory refs.  */
static struct st_expr * store_motion_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t store_motion_mems_table = NULL;

/* These bitmaps will hold the local dataflow properties per basic block.  */
static sbitmap *st_kill, *st_avloc, *st_antloc, *st_transp;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *st_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *st_delete_map;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Contains the edge_list returned by pre_edge_rev_lcm.  */
static struct edge_list *edge_list;

static hashval_t
pre_st_expr_hash (const void *p)
{
  int do_not_record_p = 0;
  const struct st_expr *const x = (const struct st_expr *) p;
  return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
}

static int
pre_st_expr_eq (const void *p1, const void *p2)
{
  const struct st_expr *const ptr1 = (const struct st_expr *) p1,
    *const ptr2 = (const struct st_expr *) p2;
  return exp_equiv_p (ptr1->pattern, ptr2->pattern, 0, true);
}

/* This will search the st_expr hash table for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct st_expr *
st_expr_entry (rtx x)
{
  int do_not_record_p = 0;
  struct st_expr * ptr;
  unsigned int hash;
  void **slot;
  struct st_expr e;

  hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
                   NULL, /*have_reg_qty=*/false);

  e.pattern = x;
  slot = htab_find_slot_with_hash (store_motion_mems_table, &e, hash, INSERT);
  if (*slot)
    return (struct st_expr *)*slot;

  ptr = XNEW (struct st_expr);

  ptr->next = store_motion_mems;
  ptr->pattern = x;
  ptr->pattern_regs = NULL_RTX;
  ptr->antic_stores = NULL_RTX;
  ptr->avail_stores = NULL_RTX;
  ptr->reaching_reg = NULL_RTX;
  ptr->index = 0;
  ptr->hash_index = hash;
  store_motion_mems = ptr;
  *slot = ptr;

  return ptr;
}

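/* A typical lookup (mirroring the use in find_moveable_store) is simply

     struct st_expr *ptr = st_expr_entry (SET_DEST (set));

   which either finds the entry whose pattern is exp_equiv_p to the MEM,
   or creates a fresh entry, links it onto store_motion_mems, records it
   in the hash table, and returns it.  */
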
/* Free up an individual st_expr entry.  */

static void
free_st_expr_entry (struct st_expr * ptr)
{
  free_INSN_LIST_list (& ptr->antic_stores);
  free_INSN_LIST_list (& ptr->avail_stores);

  free (ptr);
}

/* Free up all memory associated with the st_expr list.  */

static void
free_store_motion_mems (void)
{
  if (store_motion_mems_table)
    htab_delete (store_motion_mems_table);
  store_motion_mems_table = NULL;

  while (store_motion_mems)
    {
      struct st_expr * tmp = store_motion_mems;
      store_motion_mems = store_motion_mems->next;
      free_st_expr_entry (tmp);
    }
  store_motion_mems = NULL;
}

/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_store_motion_mems (void)
{
  struct st_expr * ptr;
  int n = 0;

  for (ptr = store_motion_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}

/* Return first item in the list.  */

static inline struct st_expr *
first_st_expr (void)
{
  return store_motion_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct st_expr *
next_st_expr (struct st_expr * ptr)
{
  return ptr->next;
}

/* Dump debugging info about the store_motion_mems list to FILE.  */

static void
print_store_motion_mems (FILE * file)
{
  struct st_expr * ptr;

  fprintf (file, "STORE_MOTION list of MEM exprs considered:\n");

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n   ANTIC stores : ");

      if (ptr->antic_stores)
        print_rtl (file, ptr->antic_stores);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n   AVAIL stores : ");

      if (ptr->avail_stores)
        print_rtl (file, ptr->avail_stores);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}
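
/* The resulting dump looks roughly like this (illustrative only, the
   exact rendering of the INSN lists depends on print_rtl):

     STORE_MOTION list of MEM exprs considered:
       Pattern (  0): (mem/c:SI (reg/f:SI 60))
          ANTIC stores : (insn_list 23 (nil))
          AVAIL stores : (insn_list 42 (nil))
*/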
\f
/* Return false if any of the registers in list X are killed
   due to the set of registers recorded in REGS_SET.  */

static bool
store_ops_ok (const_rtx x, int *regs_set)
{
  const_rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
        return false;
    }

  return true;
}

/* Helper for extract_mentioned_regs.  */

static int
extract_mentioned_regs_1 (rtx *loc, void *data)
{
  rtx *mentioned_regs_p = (rtx *) data;

  if (REG_P (*loc))
    *mentioned_regs_p = alloc_EXPR_LIST (0, *loc, *mentioned_regs_p);

  return 0;
}

/* Returns a list of registers mentioned in X.
   FIXME: A regset would be prettier and less expensive.  */

static rtx
extract_mentioned_regs (rtx x)
{
  rtx mentioned_regs = NULL;
  for_each_rtx (&x, extract_mentioned_regs_1, &mentioned_regs);
  return mentioned_regs;
}

/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after X.  */

static bool
load_kills_store (const_rtx x, const_rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x);
}

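/* Summary of the two cases above: when STORE_PATTERN executes after the
   load X, the conflict is read-then-write, i.e. an anti-dependence of
   the store on the load; when the store executes first, the conflict is
   write-then-read, i.e. a true dependence of the load on the store.
   That is why the two branches call different dependence checks with
   the operands in different orders.  */
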
/* Go through the entire rtx X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (const_rtx x, const_rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  int ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (MEM_P (x))
    {
      if (load_kills_store (x, store_pattern, after))
        return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
        ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}

/* Go through pattern PAT looking for any loads which might kill the
   store in X.  Return true if found.
   AFTER is true if we are checking the case when the loads that kill X
   occur after the insn for PAT.  */

static inline bool
store_killed_in_pat (const_rtx x, const_rtx pat, int after)
{
  if (GET_CODE (pat) == SET)
    {
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == ZERO_EXTRACT)
        dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (MEM_P (dest)
          && !exp_equiv_p (dest, x, 0, true))
        {
          if (after)
            {
              if (output_dependence (dest, x))
                return true;
            }
          else
            {
              if (output_dependence (x, dest))
                return true;
            }
        }
    }

  if (find_loads (pat, x, after))
    return true;

  return false;
}

/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
{
  const_rtx reg, base, note, pat;

  if (! NONDEBUG_INSN_P (insn))
    return false;

  if (CALL_P (insn))
    {
      /* A normal or pure call might read from pattern,
         but a const call will not.  */
      if (!RTL_CONST_CALL_P (insn))
        return true;

      /* But even a const call reads its parameters.  Check whether the
         base of some of the registers used in the mem is the stack
         pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
        {
          base = find_base_term (XEXP (reg, 0));
          if (!base
              || (GET_CODE (base) == ADDRESS
                  && GET_MODE (base) == Pmode
                  && XEXP (base, 0) == stack_pointer_rtx))
            return true;
        }

      return false;
    }

  pat = PATTERN (insn);
  if (GET_CODE (pat) == SET)
    {
      if (store_killed_in_pat (x, pat, after))
        return true;
    }
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
        if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
          return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (exp_equiv_p (note, x, 0, true))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}

/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, return the last insn that kills it in FAIL_INSN.  */

static bool
store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
                    int *regs_set_after, rtx *fail_insn)
{
  rtx last = BB_END (bb), act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
        *fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
        if (fail_insn)
          *fail_insn = act;
        return true;
      }

  return false;
}

/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is list of registers mentioned in X.
   REGS_SET_BEFORE is bitmap of registers set before or in this insn.  */
static bool
store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
                     int *regs_set_before)
{
  rtx first = BB_HEAD (bb);

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}

/* The last insn in the basic block that compute_store_table is processing,
   where store_killed_after is true for X.
   Since we go through the basic block from BB_END to BB_HEAD, this is
   also the available store at the end of the basic block.  Therefore
   this is in effect a cache, to avoid calling store_killed_after for
   equivalent aliasing store expressions.
   This value is only meaningful during the computation of the store
   table.  We hi-jack the REACHING_REG field of struct st_expr to save
   a bit of memory.  */
#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)

/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB from
   head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORES
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORES;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn due to which it fails to be
      available is stored in REACHING_REG (via LAST_AVAIL_CHECK_FAILURE).

   Things are complicated a bit by the fact that there already may be stores
   to the same MEM from other blocks; also the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */

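/* For example (hypothetical insn UIDs): if a block contains the stores

       insn 10:  *p = a
       insn 20:  *p = b

   then after the block is processed, ANTIC_STORES for *p starts with
   insn 10 (the first store is the anticipatable one) and AVAIL_STORES
   starts with insn 20 (the last store is the one still available at the
   end of the block); the earlier available store is redundant and is
   removed later, in build_store_vectors.  */
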
static void
find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
{
  struct st_expr * ptr;
  rtx dest, set, tmp;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (cfun->can_throw_non_call_exceptions && may_trap_p (dest))
    return;

  /* Even if the destination cannot trap, the source may.  In this case we'd
     need to handle updating the REG_EH_REGION note.  */
  if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
    return;

  /* Make sure that the SET_SRC of this store insn can be assigned to
     a register, or we will fail later on in replace_store_insn, which
     assumes that we can do this.  But sometimes the target machine has
     oddities like a MEM read-modify-write instruction.  See for example
     PR24257.  */
  if (!can_assign_to_reg_without_clobbers_p (SET_SRC (set)))
    return;

  ptr = st_expr_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ptr->antic_stores)
    check_anticipatable = 1;
  else
    {
      tmp = XEXP (ptr->antic_stores, 0);
      if (tmp != NULL_RTX
          && BLOCK_FOR_INSN (tmp) != bb)
        check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
        tmp = NULL_RTX;
      else
        tmp = insn;
      ptr->antic_stores = alloc_INSN_LIST (tmp, ptr->antic_stores);
    }

  /* It is not necessary to check whether the store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!ptr->avail_stores)
    check_available = 1;
  else
    {
      tmp = XEXP (ptr->avail_stores, 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
        check_available = 1;
    }
  if (check_available)
    {
      /* Check that we have already reached the insn at which the check
         failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
        {
          for (tmp = BB_END (bb);
               tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
               tmp = PREV_INSN (tmp))
            continue;
          if (tmp == insn)
            check_available = 0;
        }
      else
        check_available = store_killed_after (dest, ptr->pattern_regs, insn,
                                              bb, regs_set_after,
                                              &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    ptr->avail_stores = alloc_INSN_LIST (insn, ptr->avail_stores);
}

/* Find available and anticipatable stores.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
#ifdef ENABLE_CHECKING
  unsigned regno;
#endif
  rtx insn, tmp;
  df_ref *def_rec;
  int *last_set_in, *already_set;
  struct st_expr * ptr, **prev_next_ptr_ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  store_motion_mems = NULL;
  store_motion_mems_table = htab_create (13, pre_st_expr_hash,
                                         pre_st_expr_eq, NULL);
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      FOR_BB_INSNS (bb, insn)
        {
          if (! NONDEBUG_INSN_P (insn))
            continue;

          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            last_set_in[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);
        }

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      FOR_BB_INSNS (bb, insn)
        {
          if (! NONDEBUG_INSN_P (insn))
            continue;

          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            already_set[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);

          /* Now that we've marked regs, look for stores.  */
          find_moveable_store (insn, already_set, last_set_in);

          /* Unmark regs that are no longer set.  */
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (last_set_in[DF_REF_REGNO (*def_rec)] == INSN_UID (insn))
              last_set_in[DF_REF_REGNO (*def_rec)] = 0;
        }

#ifdef ENABLE_CHECKING
      /* last_set_in should now be all-zero.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
        gcc_assert (!last_set_in[regno]);
#endif

      /* Clear temporary marks.  */
      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
        {
          LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
          if (ptr->antic_stores
              && (tmp = XEXP (ptr->antic_stores, 0)) == NULL_RTX)
            ptr->antic_stores = XEXP (ptr->antic_stores, 1);
        }
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = store_motion_mems, prev_next_ptr_ptr = &store_motion_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (! ptr->avail_stores)
        {
          *prev_next_ptr_ptr = ptr->next;
          htab_remove_elt_with_hash (store_motion_mems_table,
                                     ptr, ptr->hash_index);
          free_st_expr_entry (ptr);
        }
      else
        prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_store_motion_mems ();

  if (dump_file)
    print_store_motion_mems (dump_file);

  free (last_set_in);
  free (already_set);
  return ret;
}

/* In all code following after this, REACHING_REG has its original
   meaning again.  Avoid confusion, and undef the accessor macro for
   the temporary marks usage in compute_store_table.  */
#undef LAST_AVAIL_CHECK_FAILURE

/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

static void
insert_insn_start_basic_block (rtx insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (BB_HEAD (bb));
  rtx before = BB_HEAD (bb);
  while (before != 0)
    {
      if (! LABEL_P (before)
          && !NOTE_INSN_BASIC_BLOCK_P (before))
        break;
      prev = before;
      if (prev == BB_END (bb))
        break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after_noloc (insn, prev, bb);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
               bb->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }
}

/* This routine will insert a store on an edge.  EXPR is the st_expr entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct st_expr * expr, edge e)
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;
  edge_iterator ei;

  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it there as well.  */
  bb = e->dest;
  FOR_EACH_EDGE (tmp, ei, e->dest->preds)
    if (!(tmp->flags & EDGE_FAKE))
      {
        int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

        gcc_assert (index != EDGE_INDEX_NO_EDGE);
        if (! TEST_BIT (st_insert_map[index], expr->index))
          break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      FOR_EACH_EDGE (tmp, ei, e->dest->preds)
        {
          int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
          RESET_BIT (st_insert_map[index], expr->index);
        }
      insert_insn_start_basic_block (insn, bb);
      return 0;
    }

  /* We can't put stores in the front of blocks pointed to by abnormal
     edges since that may put a store where one didn't use to be.  */
  gcc_assert (!(e->flags & EDGE_ABNORMAL));

  insert_insn_on_edge (insn, e);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
               e->src->index, e->dest->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  return 1;
}

/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
{
  edge_iterator *stack, ei;
  int sp;
  edge act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  stack = XNEWVEC (edge_iterator, n_basic_blocks);
  sp = 0;
  ei = ei_start (bb->succs);

  sbitmap_zero (visited);

  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
  while (1)
    {
      if (!act)
        {
          if (!sp)
            {
              free (stack);
              sbitmap_free (visited);
              return;
            }
          act = ei_edge (stack[--sp]);
        }
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
          || TEST_BIT (visited, bb->index))
        {
          if (!ei_end_p (ei))
            ei_next (&ei);
          act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
          continue;
        }
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
        {
          for (last = smexpr->antic_stores;
               BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
               last = XEXP (last, 1))
            continue;
          last = XEXP (last, 0);
        }
      else
        last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
        if (NONDEBUG_INSN_P (insn))
          {
            note = find_reg_equal_equiv_note (insn);
            if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
              continue;

            if (dump_file)
              fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                       INSN_UID (insn));
            remove_note (insn, note);
          }

      if (!ei_end_p (ei))
        ei_next (&ei);
      act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;

      if (EDGE_COUNT (bb->succs) > 0)
        {
          if (act)
            stack[sp++] = ei;
          ei = ei_start (bb->succs);
          act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
        }
    }
}

/* This routine will replace a store with a SET to a specified register.  */

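/* Schematically (illustrative RTL), the insn

       (set (mem:SI (reg/f:SI 60)) (reg:SI 70))

   becomes

       (set (reg:SI 80) (reg:SI 70))

   where (reg:SI 80) is the expression's reaching_reg; the memory store
   itself is re-emitted later by insert_store on the chosen edges.  */
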
static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct st_expr *smexpr)
{
  rtx insn, mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));

  for (ptr = smexpr->antic_stores; ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
        XEXP (ptr, 0) = insn;
        break;
      }

  /* Move the notes from the deleted insn to its replacement.  */
  REG_NOTES (insn) = REG_NOTES (del);

  /* Emit the insn AFTER all the notes are transferred.
     This is cheaper since we avoid df rescanning for the note change.  */
  insn = emit_insn_after (insn, del);

  if (dump_file)
    {
      fprintf (dump_file,
               "STORE_MOTION delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (dump_file, del, 6);
      fprintf (dump_file, "\nSTORE_MOTION replaced with insn:\n      ");
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate if they are reached by this new
     definition, so drop them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      {
        set = single_set (insn);
        if (!set)
          continue;
        if (exp_equiv_p (SET_DEST (set), mem, 0, true))
          return;
        note = find_reg_equal_equiv_note (insn);
        if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
          continue;

        if (dump_file)
          fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                   INSN_UID (insn));
        remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}

/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct st_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);

  reg = expr->reaching_reg;

  for (i = expr->avail_stores; i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
        {
          /* We know there is only one since we deleted redundant
             ones during the available computation.  */
          replace_store_insn (reg, del, bb, expr);
          break;
        }
    }
}

/* Fill in available, anticipatable, transparent and kill vectors in
   STORE_DATA, based on lists of available and anticipatable stores.  */
static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx insn, st;
  struct st_expr * ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  st_avloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_avloc, last_basic_block);

  st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      for (st = ptr->avail_stores; st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);

          /* If we've already seen an available expression in this block,
             we can delete this one (it occurs earlier in the block).  We'll
             copy the SRC expression to an unused register in case there
             are any side effects.  */
          if (TEST_BIT (st_avloc[bb->index], ptr->index))
            {
              rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
              if (dump_file)
                fprintf (dump_file, "Removing redundant store:\n");
              replace_store_insn (r, XEXP (st, 0), bb, ptr);
              continue;
            }
          SET_BIT (st_avloc[bb->index], ptr->index);
        }

      for (st = ptr->antic_stores; st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);
          SET_BIT (st_antloc[bb->index], ptr->index);
        }
    }

  st_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_kill, last_basic_block);

  st_transp = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_transp, last_basic_block);
  regs_set_in_block = XNEWVEC (int, max_gcse_regno);

  FOR_EACH_BB (bb)
    {
      memset (regs_set_in_block, 0, sizeof (int) * max_gcse_regno);

      FOR_BB_INSNS (bb, insn)
        if (NONDEBUG_INSN_P (insn))
          {
            df_ref *def_rec;
            for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
              {
                unsigned int ref_regno = DF_REF_REGNO (*def_rec);
                if (ref_regno < max_gcse_regno)
                  regs_set_in_block[ref_regno] = 1;
              }
          }

      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
        {
          if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
                                  bb, regs_set_in_block, NULL))
            {
              /* It should not be necessary to consider the expression
                 killed if it is both anticipatable and available.  */
              if (!TEST_BIT (st_antloc[bb->index], ptr->index)
                  || !TEST_BIT (st_avloc[bb->index], ptr->index))
                SET_BIT (st_kill[bb->index], ptr->index);
            }
          else
            SET_BIT (st_transp[bb->index], ptr->index);
        }
    }

  free (regs_set_in_block);

  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_kill", "", st_kill, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_transp", "", st_transp, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
    }
}

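/* To summarize the local properties computed above for each (block,
   store) pair:

     st_avloc  -- the store is available at the end of the block
                  (downward exposed),
     st_antloc -- the store is anticipatable at the start of the block
                  (upward exposed),
     st_kill   -- something in the block kills the store expression,
     st_transp -- the block is transparent: the store could be moved
                  across it without changing behavior.

   These feed pre_edge_rev_lcm below, which computes where to insert
   stores (st_insert_map, per edge) and where to delete them
   (st_delete_map, per block).  */
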
/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_store_motion_mems ();

  if (st_avloc)
    sbitmap_vector_free (st_avloc);
  if (st_kill)
    sbitmap_vector_free (st_kill);
  if (st_transp)
    sbitmap_vector_free (st_transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (st_insert_map)
    sbitmap_vector_free (st_insert_map);
  if (st_delete_map)
    sbitmap_vector_free (st_delete_map);

  st_avloc = st_kill = st_transp = st_antloc = NULL;
  st_insert_map = st_delete_map = NULL;
}

/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.
   Return nonzero if transformations are performed by the pass.  */

static int
one_store_motion_pass (void)
{
  basic_block bb;
  int x;
  struct st_expr * ptr;
  int did_edge_inserts = 0;
  int n_stores_deleted = 0;
  int n_stores_created = 0;

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      htab_delete (store_motion_mems_table);
      store_motion_mems_table = NULL;
      end_alias_analysis ();
      return 0;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (num_stores, st_transp, st_avloc,
                                st_antloc, st_kill, &st_insert_map,
                                &st_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      /* If any of the edges we have above are abnormal, we can't move this
         store.  */
      for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
        if (TEST_BIT (st_insert_map[x], ptr->index)
            && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
          break;

      if (x >= 0)
        {
          if (dump_file != NULL)
            fprintf (dump_file,
                     "Can't replace store %d: abnormal edge from %d to %d\n",
                     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
                     INDEX_EDGE (edge_list, x)->dest->index);
          continue;
        }

      /* Delete the stores that this expression makes redundant, copying
         the stored value to the reaching register.  */
      FOR_EACH_BB (bb)
        if (TEST_BIT (st_delete_map[bb->index], ptr->index))
          {
            delete_store (ptr, bb);
            n_stores_deleted++;
          }

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (st_insert_map[x], ptr->index))
          {
            did_edge_inserts |= insert_store (ptr, INDEX_EDGE (edge_list, x));
            n_stores_created++;
          }
    }

  if (did_edge_inserts)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_exit_edges ();
  end_alias_analysis ();

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION of %s, %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d insns deleted, %d insns created\n",
               n_stores_deleted, n_stores_created);
    }

  return (n_stores_deleted > 0 || n_stores_created > 0);
}

\f
static bool
gate_rtl_store_motion (void)
{
  return optimize > 0 && flag_gcse_sm
    && !cfun->calls_setjmp
    && optimize_function_for_speed_p (cfun)
    && dbg_cnt (store_motion);
}

static unsigned int
execute_rtl_store_motion (void)
{
  delete_unreachable_blocks ();
  df_analyze ();
  flag_rerun_cse_after_global_opts |= one_store_motion_pass ();
  return 0;
}

struct rtl_opt_pass pass_rtl_store_motion =
{
 {
  RTL_PASS,
  "store_motion",                       /* name */
  gate_rtl_store_motion,                /* gate */
  execute_rtl_store_motion,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_LSM,                               /* tv_id */
  PROP_cfglayout,                       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_verify_flow | TODO_ggc_collect   /* todo_flags_finish */
 }
};