/* Dataflow support routines.
   Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
   mhayes@redhat.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.


OVERVIEW:

This file provides some dataflow routines for computing reaching defs,
upward exposed uses, live variables, def-use chains, and use-def
chains.  The global dataflow is performed using simple iterative
methods with a worklist and could be sped up by ordering the blocks
with a depth first search order.

A `struct ref' data structure (ref) is allocated for every register
reference (def or use) and this records the insn and bb the ref is
found within.  The refs are linked together in chains of uses and defs
for each insn and for each register.  Each ref also has a chain field
that links all the use refs for a def or all the def refs for a use.
This is used to create use-def or def-use chains.

39
USAGE:

Here's an example of using the dataflow routines.

      struct df *df;

      df = df_init ();

      df_analyse (df, 0, DF_ALL);

      df_dump (df, DF_ALL, stderr);

      df_finish (df);


df_init simply creates a poor man's object (df) that needs to be
passed to all the dataflow routines.  df_finish destroys this
object and frees up any allocated memory.

df_analyse performs the following (a calling sketch follows the list):

1. Records defs and uses by scanning the insns in each basic block
   or by scanning the insns queued by df_insn_modify.
2. Links defs and uses into insn-def and insn-use chains.
3. Links defs and uses into reg-def and reg-use chains.
4. Assigns LUIDs to each insn (for modified blocks).
5. Calculates local reaching definitions.
6. Calculates global reaching definitions.
7. Creates use-def chains.
8. Calculates local reaching uses (upwards exposed uses).
9. Calculates global reaching uses.
10. Creates def-use chains.
11. Calculates local live registers.
12. Calculates global live registers.
13. Calculates register lifetimes and determines local registers.
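
As an illustrative sketch (an assumption, not part of the original
text), the FLAGS argument can restrict the analysis to a subset of
these problems by combining the flags used in this file:

      df_analyse (df, 0, DF_RD | DF_LR | DF_HARD_REGS);

This hypothetical call would compute only reaching definitions and
live registers, taking the hard registers into account as well.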


PHILOSOPHY:

Note that the dataflow information is not updated for every newly
deleted or created insn.  If the dataflow information requires
updating then all the changed, new, or deleted insns need to be
marked with df_insn_modify (or df_insns_modify) either directly or
indirectly (say through calling df_insn_delete).  df_insn_modify
marks all the modified insns to get processed the next time df_analyse
is called.

Beware that tinkering with insns may invalidate the dataflow information.
The philosophy behind these routines is that once the dataflow
information has been gathered, the user should store what they require
before they tinker with any insn.  Once a reg is replaced, for example,
then the reg-def/reg-use chains will point to the wrong place.  Once a
whole lot of changes have been made, df_analyse can be called again
to update the dataflow information.  Currently, this is not very smart
with regard to propagating changes to the dataflow so it should not
be called very often.
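
A minimal sketch of that workflow (illustrative only; INSN and
NEW_PATTERN are hypothetical, and df_insn_modify is assumed to take
the containing basic block as declared in df.h):

      PATTERN (insn) = new_pattern;
      df_insn_modify (df, bb, insn);

      ... make further changes ...

      df_analyse (df, 0, DF_ALL);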


DATA STRUCTURES:

The basic object is a REF (reference) and this may either be a DEF
(definition) or a USE of a register.

These are linked into a variety of lists; namely reg-def, reg-use,
insn-def, insn-use, def-use, and use-def lists.  For example, the
reg-def lists contain all the refs that define a given register
while the insn-use lists contain all the refs used by an insn.

Note that the reg-def and reg-use chains are generally short (except
for the hard registers) and thus it is much faster to search these
chains rather than searching the def or use bitmaps.

If the insns are in SSA form then the reg-def and use-def lists
should only contain the single defining ref.
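
For instance, walking the reg-def chain of register REGNO looks
roughly like this (a sketch based on the chain code later in this
file, not part of the original comment):

      struct df_link *link;

      for (link = df->regs[regno].defs; link; link = link->next)
        {
          struct ref *def = link->ref;
          ... inspect DF_REF_INSN (def), DF_REF_BB (def), etc. ...
        }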

TODO:

1) Incremental dataflow analysis.

Note that if a loop invariant insn is hoisted (or sunk), we do not
need to change the def-use or use-def chains.  All we have to do is to
change the bb field for all the associated defs and uses and to
renumber the LUIDs for the original and new basic blocks of the insn.

When shadowing loop mems we create new uses and defs for new pseudos
so we do not affect the existing dataflow information.

My current strategy is to queue up all modified, created, or deleted
insns so when df_analyse is called we can easily determine all the new
or deleted refs.  Currently the global dataflow information is
recomputed from scratch but this could be propagated more efficiently.

2) Improved global data flow computation using depth first search.

3) Reduced memory requirements.

We could operate a pool of ref structures.  When a ref is deleted it
gets returned to the pool (say by linking on to a chain of free refs).
This will require a pair of bitmaps for defs and uses so that we can
tell which ones have been changed.  Alternatively, we could
periodically squeeze the def and use tables and associated bitmaps and
renumber the def and use ids.

4) Ordering of reg-def and reg-use lists.

Should the first entry in the def list be the first def (within a BB)?
Similarly, should the first entry in the use list be the last use
(within a BB)?

5) Working with a sub-CFG.

Often the whole CFG does not need to be analysed; for example,
when optimising a loop, only certain registers are of interest.
Perhaps there should be a bitmap argument to df_analyse to specify
which registers should be analysed?  */

#define HANDLE_SUBREG

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "obstack.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "bitmap.h"
#include "df.h"


#define FOR_ALL_BBS(BB, CODE)                                   \
do {                                                            \
  int node_;                                                    \
  for (node_ = 0; node_ < n_basic_blocks; node_++)              \
    {(BB) = BASIC_BLOCK (node_); CODE;};} while (0)

#define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE)            \
do {                                                            \
  unsigned int node_;                                           \
  EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_,                 \
    {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)

#define FOR_EACH_BB_IN_BITMAP_REV(BITMAP, MIN, BB, CODE)        \
do {                                                            \
  unsigned int node_;                                           \
  EXECUTE_IF_SET_IN_BITMAP_REV (BITMAP, node_,                  \
    {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)

#define FOR_EACH_BB_IN_SBITMAP(BITMAP, MIN, BB, CODE)           \
do {                                                            \
  unsigned int node_;                                           \
  EXECUTE_IF_SET_IN_SBITMAP (BITMAP, MIN, node_,                \
    {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
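
/* Illustrative use of the iterator above (this mirrors real calls
   later in this file; BLOCKS would be a bitmap of block indices):

     basic_block bb;

     FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
       {
         df_bb_refs_record (df, bb);
       });  */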

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

static struct obstack df_ref_obstack;
static struct df *ddf;

static void df_reg_table_realloc PARAMS((struct df *, int));
#if 0
static void df_def_table_realloc PARAMS((struct df *, int));
#endif
static void df_insn_table_realloc PARAMS((struct df *, int));
static void df_bitmaps_alloc PARAMS((struct df *, int));
static void df_bitmaps_free PARAMS((struct df *, int));
static void df_free PARAMS((struct df *));
static void df_alloc PARAMS((struct df *, int));

static rtx df_reg_clobber_gen PARAMS((unsigned int));
static rtx df_reg_use_gen PARAMS((unsigned int));

static inline struct df_link *df_link_create PARAMS((struct ref *,
                                                     struct df_link *));
static struct df_link *df_ref_unlink PARAMS((struct df_link **, struct ref *));
static void df_def_unlink PARAMS((struct df *, struct ref *));
static void df_use_unlink PARAMS((struct df *, struct ref *));
static void df_insn_refs_unlink PARAMS ((struct df *, basic_block, rtx));
#if 0
static void df_bb_refs_unlink PARAMS ((struct df *, basic_block));
static void df_refs_unlink PARAMS ((struct df *, bitmap));
#endif

static struct ref *df_ref_create PARAMS((struct df *,
                                         rtx, rtx *, basic_block, rtx,
                                         enum df_ref_type));
static void df_ref_record_1 PARAMS((struct df *, rtx, rtx *,
                                    basic_block, rtx, enum df_ref_type));
static void df_ref_record PARAMS((struct df *, rtx, rtx *,
                                  basic_block bb, rtx, enum df_ref_type));
static void df_def_record_1 PARAMS((struct df *, rtx, basic_block, rtx));
static void df_defs_record PARAMS((struct df *, rtx, basic_block, rtx));
static void df_uses_record PARAMS((struct df *, rtx *,
                                   enum df_ref_type, basic_block, rtx));
static void df_insn_refs_record PARAMS((struct df *, basic_block, rtx));
static void df_bb_refs_record PARAMS((struct df *, basic_block));
static void df_refs_record PARAMS((struct df *, bitmap));

static int df_visit_next PARAMS ((struct df *, sbitmap));
static void df_bb_reg_def_chain_create PARAMS((struct df *, basic_block));
static void df_reg_def_chain_create PARAMS((struct df *, bitmap));
static void df_bb_reg_use_chain_create PARAMS((struct df *, basic_block));
static void df_reg_use_chain_create PARAMS((struct df *, bitmap));
static void df_bb_du_chain_create PARAMS((struct df *, basic_block, bitmap));
static void df_du_chain_create PARAMS((struct df *, bitmap));
static void df_bb_ud_chain_create PARAMS((struct df *, basic_block));
static void df_ud_chain_create PARAMS((struct df *, bitmap));
static void df_rd_global_compute PARAMS((struct df *, bitmap));
static void df_ru_global_compute PARAMS((struct df *, bitmap));
static void df_lr_global_compute PARAMS((struct df *, bitmap));
static void df_bb_rd_local_compute PARAMS((struct df *, basic_block));
static void df_rd_local_compute PARAMS((struct df *, bitmap));
static void df_bb_ru_local_compute PARAMS((struct df *, basic_block));
static void df_ru_local_compute PARAMS((struct df *, bitmap));
static void df_bb_lr_local_compute PARAMS((struct df *, basic_block));
static void df_lr_local_compute PARAMS((struct df *, bitmap));
static void df_bb_reg_info_compute PARAMS((struct df *, basic_block, bitmap));
static void df_reg_info_compute PARAMS((struct df *, bitmap));

static int df_bb_luids_set PARAMS((struct df *df, basic_block));
static int df_luids_set PARAMS((struct df *df, bitmap));

static int df_modified_p PARAMS ((struct df *, bitmap));
static int df_refs_queue PARAMS ((struct df *));
static int df_refs_process PARAMS ((struct df *));
static int df_bb_refs_update PARAMS ((struct df *, basic_block));
static int df_refs_update PARAMS ((struct df *));
static void df_analyse_1 PARAMS((struct df *, bitmap, int, int));

static void df_insns_modify PARAMS((struct df *, basic_block,
                                    rtx, rtx));
static int df_rtx_mem_replace PARAMS ((rtx *, void *));
static int df_rtx_reg_replace PARAMS ((rtx *, void *));
void df_refs_reg_replace PARAMS ((struct df *, bitmap,
                                  struct df_link *, rtx, rtx));

static int df_def_dominates_all_uses_p PARAMS((struct df *, struct ref *def));
static int df_def_dominates_uses_p PARAMS((struct df *,
                                           struct ref *def, bitmap));
static struct ref *df_bb_regno_last_use_find PARAMS((struct df *, basic_block,
                                                     unsigned int));
static struct ref *df_bb_regno_first_def_find PARAMS((struct df *, basic_block,
                                                      unsigned int));
static struct ref *df_bb_insn_regno_last_use_find PARAMS((struct df *,
                                                          basic_block,
                                                          rtx, unsigned int));
static struct ref *df_bb_insn_regno_first_def_find PARAMS((struct df *,
                                                           basic_block,
                                                           rtx, unsigned int));

static void df_chain_dump PARAMS((struct df_link *, FILE *file));
static void df_chain_dump_regno PARAMS((struct df_link *, FILE *file));
static void df_regno_debug PARAMS ((struct df *, unsigned int, FILE *));
static void df_ref_debug PARAMS ((struct df *, struct ref *, FILE *));

\f
/* Local memory allocation/deallocation routines.  */


/* Increase the insn info table by SIZE more elements.  */
static void
df_insn_table_realloc (df, size)
     struct df *df;
     int size;
{
  /* Make table 25 percent larger by default.  */
  if (! size)
    size = df->insn_size / 4;

  size += df->insn_size;

  df->insns = (struct insn_info *)
    xrealloc (df->insns, size * sizeof (struct insn_info));

  memset (df->insns + df->insn_size, 0,
          (size - df->insn_size) * sizeof (struct insn_info));

  df->insn_size = size;

  if (! df->insns_modified)
    {
      df->insns_modified = BITMAP_XMALLOC ();
      bitmap_zero (df->insns_modified);
    }
}


/* Increase the reg info table by SIZE more elements.  */
static void
df_reg_table_realloc (df, size)
     struct df *df;
     int size;
{
  /* Make table 25 percent larger by default.  */
  if (! size)
    size = df->reg_size / 4;

  size += df->reg_size;

  df->regs = (struct reg_info *)
    xrealloc (df->regs, size * sizeof (struct reg_info));

  /* Zero the new entries.  */
  memset (df->regs + df->reg_size, 0,
          (size - df->reg_size) * sizeof (struct reg_info));

  df->reg_size = size;
}


#if 0
/* Not currently used.  */
static void
df_def_table_realloc (df, size)
     struct df *df;
     int size;
{
  int i;
  struct ref *refs;

  /* Make table 25 percent larger by default.  */
  if (! size)
    size = df->def_size / 4;

  df->def_size += size;
  df->defs = xrealloc (df->defs,
                       df->def_size * sizeof (*df->defs));

  /* Allocate a new block of memory and link into list of blocks
     that will need to be freed later.  */

  refs = xmalloc (size * sizeof (*refs));

  /* Link all the new refs together, overloading the chain field.  */
  for (i = 0; i < size - 1; i++)
    refs[i].chain = (struct df_link *)(refs + i + 1);
  refs[size - 1].chain = 0;
}
#endif



/* Allocate bitmaps for each basic block.  */
static void
df_bitmaps_alloc (df, flags)
     struct df *df;
     int flags;
{
  unsigned int i;
  int dflags = 0;

  /* Free the bitmaps if they need resizing.  */
  if ((flags & DF_LR) && df->n_regs < (unsigned int)max_reg_num ())
    dflags |= DF_LR | DF_RU;
  if ((flags & DF_RU) && df->n_uses < df->use_id)
    dflags |= DF_RU;
  if ((flags & DF_RD) && df->n_defs < df->def_id)
    dflags |= DF_RD;

  if (dflags)
    df_bitmaps_free (df, dflags);

  df->n_defs = df->def_id;
  df->n_uses = df->use_id;

  for (i = 0; i < df->n_bbs; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      if (flags & DF_RD && ! bb_info->rd_in)
        {
          /* Allocate bitmaps for reaching definitions.  */
          bb_info->rd_kill = BITMAP_XMALLOC ();
          bitmap_zero (bb_info->rd_kill);
          bb_info->rd_gen = BITMAP_XMALLOC ();
          bitmap_zero (bb_info->rd_gen);
          bb_info->rd_in = BITMAP_XMALLOC ();
          bb_info->rd_out = BITMAP_XMALLOC ();
          bb_info->rd_valid = 0;
        }

      if (flags & DF_RU && ! bb_info->ru_in)
        {
          /* Allocate bitmaps for upward exposed uses.  */
          bb_info->ru_kill = BITMAP_XMALLOC ();
          bitmap_zero (bb_info->ru_kill);
          /* Note the lack of symmetry.  */
          bb_info->ru_gen = BITMAP_XMALLOC ();
          bitmap_zero (bb_info->ru_gen);
          bb_info->ru_in = BITMAP_XMALLOC ();
          bb_info->ru_out = BITMAP_XMALLOC ();
          bb_info->ru_valid = 0;
        }

      if (flags & DF_LR && ! bb_info->lr_in)
        {
          /* Allocate bitmaps for live variables.  */
          bb_info->lr_def = BITMAP_XMALLOC ();
          bitmap_zero (bb_info->lr_def);
          bb_info->lr_use = BITMAP_XMALLOC ();
          bitmap_zero (bb_info->lr_use);
          bb_info->lr_in = BITMAP_XMALLOC ();
          bb_info->lr_out = BITMAP_XMALLOC ();
          bb_info->lr_valid = 0;
        }
    }
}


/* Free bitmaps for each basic block.  */
static void
df_bitmaps_free (df, flags)
     struct df *df;
     int flags;
{
  unsigned int i;

  for (i = 0; i < df->n_bbs; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      if (!bb_info)
        continue;

      if ((flags & DF_RD) && bb_info->rd_in)
        {
          /* Free bitmaps for reaching definitions.  */
          BITMAP_XFREE (bb_info->rd_kill);
          bb_info->rd_kill = NULL;
          BITMAP_XFREE (bb_info->rd_gen);
          bb_info->rd_gen = NULL;
          BITMAP_XFREE (bb_info->rd_in);
          bb_info->rd_in = NULL;
          BITMAP_XFREE (bb_info->rd_out);
          bb_info->rd_out = NULL;
        }

      if ((flags & DF_RU) && bb_info->ru_in)
        {
          /* Free bitmaps for upward exposed uses.  */
          BITMAP_XFREE (bb_info->ru_kill);
          bb_info->ru_kill = NULL;
          BITMAP_XFREE (bb_info->ru_gen);
          bb_info->ru_gen = NULL;
          BITMAP_XFREE (bb_info->ru_in);
          bb_info->ru_in = NULL;
          BITMAP_XFREE (bb_info->ru_out);
          bb_info->ru_out = NULL;
        }

      if ((flags & DF_LR) && bb_info->lr_in)
        {
          /* Free bitmaps for live variables.  */
          BITMAP_XFREE (bb_info->lr_def);
          bb_info->lr_def = NULL;
          BITMAP_XFREE (bb_info->lr_use);
          bb_info->lr_use = NULL;
          BITMAP_XFREE (bb_info->lr_in);
          bb_info->lr_in = NULL;
          BITMAP_XFREE (bb_info->lr_out);
          bb_info->lr_out = NULL;
        }
    }
  df->flags &= ~(flags & (DF_RD | DF_RU | DF_LR));
}


/* Allocate and initialise dataflow memory.  */
static void
df_alloc (df, n_regs)
     struct df *df;
     int n_regs;
{
  int n_insns;
  int i;

  gcc_obstack_init (&df_ref_obstack);

  /* Perhaps we should use LUIDs to save memory for the insn_refs
     table.  This is only a small saving; a few pointers.  */
  n_insns = get_max_uid () + 1;

  df->def_id = 0;
  df->n_defs = 0;
  /* Approximate number of defs by number of insns.  */
  df->def_size = n_insns;
  df->defs = xmalloc (df->def_size * sizeof (*df->defs));

  df->use_id = 0;
  df->n_uses = 0;
  /* Approximate number of uses by twice number of insns.  */
  df->use_size = n_insns * 2;
  df->uses = xmalloc (df->use_size * sizeof (*df->uses));

  df->n_regs = n_regs;
  df->n_bbs = n_basic_blocks;

  /* Allocate temporary working array used during local dataflow analysis.  */
  df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));

  df_insn_table_realloc (df, n_insns);

  df_reg_table_realloc (df, df->n_regs);

  df->bbs_modified = BITMAP_XMALLOC ();
  bitmap_zero (df->bbs_modified);

  df->flags = 0;

  df->bbs = xcalloc (df->n_bbs, sizeof (struct bb_info));

  df->all_blocks = BITMAP_XMALLOC ();
  for (i = 0; i < n_basic_blocks; i++)
    bitmap_set_bit (df->all_blocks, i);
}


/* Free all the dataflow info.  */
static void
df_free (df)
     struct df *df;
{
  df_bitmaps_free (df, DF_ALL);

  if (df->bbs)
    free (df->bbs);
  df->bbs = 0;

  if (df->insns)
    free (df->insns);
  df->insns = 0;
  df->insn_size = 0;

  if (df->defs)
    free (df->defs);
  df->defs = 0;
  df->def_size = 0;
  df->def_id = 0;

  if (df->uses)
    free (df->uses);
  df->uses = 0;
  df->use_size = 0;
  df->use_id = 0;

  if (df->regs)
    free (df->regs);
  df->regs = 0;
  df->reg_size = 0;

  if (df->bbs_modified)
    BITMAP_XFREE (df->bbs_modified);
  df->bbs_modified = 0;

  if (df->insns_modified)
    BITMAP_XFREE (df->insns_modified);
  df->insns_modified = 0;

  BITMAP_XFREE (df->all_blocks);
  df->all_blocks = 0;

  obstack_free (&df_ref_obstack, NULL);
}
\f
/* Local miscellaneous routines.  */

/* Return a USE for register REGNO.  */
static rtx
df_reg_use_gen (regno)
     unsigned int regno;
{
  rtx reg;
  rtx use;

  reg = regno >= FIRST_PSEUDO_REGISTER
    ? regno_reg_rtx[regno] : gen_rtx_REG (reg_raw_mode[regno], regno);

  use = gen_rtx_USE (GET_MODE (reg), reg);
  return use;
}


/* Return a CLOBBER for register REGNO.  */
static rtx
df_reg_clobber_gen (regno)
     unsigned int regno;
{
  rtx reg;
  rtx use;

  reg = regno >= FIRST_PSEUDO_REGISTER
    ? regno_reg_rtx[regno] : gen_rtx_REG (reg_raw_mode[regno], regno);

  use = gen_rtx_CLOBBER (GET_MODE (reg), reg);
  return use;
}
\f
/* Local chain manipulation routines.  */

/* Create a link in a def-use or use-def chain.  */
static inline struct df_link *
df_link_create (ref, next)
     struct ref *ref;
     struct df_link *next;
{
  struct df_link *link;

  link = (struct df_link *) obstack_alloc (&df_ref_obstack,
                                           sizeof (*link));
  link->next = next;
  link->ref = ref;
  return link;
}


/* Remove REF from the chain headed by *PHEAD.  */
static struct df_link *
df_ref_unlink (phead, ref)
     struct df_link **phead;
     struct ref *ref;
{
  struct df_link *link = *phead;

  if (link)
    {
      if (! link->next)
        {
          /* Only a single ref.  It must be the one we want.
             If not, the def-use and use-def chains are likely to
             be inconsistent.  */
          if (link->ref != ref)
            abort ();
          /* Now have an empty chain.  */
          *phead = NULL;
        }
      else
        {
          /* Multiple refs.  One of them must be us.  */
          if (link->ref == ref)
            *phead = link->next;
          else
            {
              /* Follow chain.  */
              for (; link->next; link = link->next)
                {
                  if (link->next->ref == ref)
                    {
                      /* Unlink from list.  */
                      link->next = link->next->next;
                      return link->next;
                    }
                }
            }
        }
    }
  return link;
}


/* Unlink REF from all def-use/use-def chains, etc.  */
int
df_ref_remove (df, ref)
     struct df *df;
     struct ref *ref;
{
  if (DF_REF_REG_DEF_P (ref))
    {
      df_def_unlink (df, ref);
      df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].defs, ref);
    }
  else
    {
      df_use_unlink (df, ref);
      df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].uses, ref);
    }
  return 1;
}


/* Unlink DEF from use-def and reg-def chains.  */
static void
df_def_unlink (df, def)
     struct df *df;
     struct ref *def;
{
  struct df_link *du_link;
  unsigned int dregno = DF_REF_REGNO (def);

  /* Follow def-use chain to find all the uses of this def.  */
  for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
    {
      struct ref *use = du_link->ref;

      /* Unlink this def from the use-def chain.  */
      df_ref_unlink (&DF_REF_CHAIN (use), def);
    }
  DF_REF_CHAIN (def) = 0;

  /* Unlink def from reg-def chain.  */
  df_ref_unlink (&df->regs[dregno].defs, def);

  df->defs[DF_REF_ID (def)] = 0;
}


/* Unlink use from def-use and reg-use chains.  */
static void
df_use_unlink (df, use)
     struct df *df;
     struct ref *use;
{
  struct df_link *ud_link;
  unsigned int uregno = DF_REF_REGNO (use);

  /* Follow use-def chain to find all the defs of this use.  */
  for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
    {
      struct ref *def = ud_link->ref;

      /* Unlink this use from the def-use chain.  */
      df_ref_unlink (&DF_REF_CHAIN (def), use);
    }
  DF_REF_CHAIN (use) = 0;

  /* Unlink use from reg-use chain.  */
  df_ref_unlink (&df->regs[uregno].uses, use);

  df->uses[DF_REF_ID (use)] = 0;
}
\f
/* Local routines for recording refs.  */


/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  */
static struct ref *
df_ref_create (df, reg, loc, bb, insn, ref_type)
     struct df *df;
     rtx reg;
     rtx *loc;
     basic_block bb;
     rtx insn;
     enum df_ref_type ref_type;
{
  struct ref *this_ref;
  unsigned int uid;

  this_ref = (struct ref *) obstack_alloc (&df_ref_obstack,
                                           sizeof (*this_ref));
  DF_REF_REG (this_ref) = reg;
  DF_REF_LOC (this_ref) = loc;
  DF_REF_BB (this_ref) = bb;
  DF_REF_INSN (this_ref) = insn;
  DF_REF_CHAIN (this_ref) = 0;
  DF_REF_TYPE (this_ref) = ref_type;
  uid = INSN_UID (insn);

  if (ref_type == DF_REF_REG_DEF)
    {
      if (df->def_id >= df->def_size)
        {
          /* Make table 25 percent larger.  */
          df->def_size += (df->def_size / 4);
          df->defs = xrealloc (df->defs,
                               df->def_size * sizeof (*df->defs));
        }
      DF_REF_ID (this_ref) = df->def_id;
      df->defs[df->def_id++] = this_ref;
    }
  else
    {
      if (df->use_id >= df->use_size)
        {
          /* Make table 25 percent larger.  */
          df->use_size += (df->use_size / 4);
          df->uses = xrealloc (df->uses,
                               df->use_size * sizeof (*df->uses));
        }
      DF_REF_ID (this_ref) = df->use_id;
      df->uses[df->use_id++] = this_ref;
    }
  return this_ref;
}


/* Create a new reference of type DF_REF_TYPE for a single register REG,
   used inside the LOC rtx of INSN.  */
static void
df_ref_record_1 (df, reg, loc, bb, insn, ref_type)
     struct df *df;
     rtx reg;
     rtx *loc;
     basic_block bb;
     rtx insn;
     enum df_ref_type ref_type;
{
  df_ref_create (df, reg, loc, bb, insn, ref_type);
}


/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  */
static void
df_ref_record (df, reg, loc, bb, insn, ref_type)
     struct df *df;
     rtx reg;
     rtx *loc;
     basic_block bb;
     rtx insn;
     enum df_ref_type ref_type;
{
  unsigned int regno;

  if (GET_CODE (reg) != REG && GET_CODE (reg) != SUBREG)
    abort ();

  /* For the reg allocator we are interested in some SUBREG rtx's, but not
     all.  Notably only those representing a word extraction from a
     multi-word reg.  As written in the documentation those should have the
     form (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
     XXX Is that true?  We could also use the global word_mode variable.  */
  if (GET_CODE (reg) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
          || GET_MODE_SIZE (GET_MODE (reg))
             >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
    {
      loc = &SUBREG_REG (reg);
      reg = *loc;
    }

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int i;
      int endregno;

      if (! (df->flags & DF_HARD_REGS))
        return;

      /* GET_MODE (reg) is correct here.  We don't want to go into a SUBREG
         for the mode, because we only want to add references to regs, which
         are really referenced.  E.g. a (subreg:SI (reg:DI 0) 0) does _not_
         reference the whole reg 0 in DI mode (which would also include
         reg 1, at least, if 0 and 1 are SImode registers).  */
      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));

      for (i = regno; i < endregno; i++)
        df_ref_record_1 (df, gen_rtx_REG (reg_raw_mode[i], i),
                         loc, bb, insn, ref_type);
    }
  else
    {
      df_ref_record_1 (df, reg, loc, bb, insn, ref_type);
    }
}


/* Process all the registers defined in the rtx, X.  */
static void
df_def_record_1 (df, x, bb, insn)
     struct df *df;
     rtx x;
     basic_block bb;
     rtx insn;
{
  rtx *loc = &SET_DEST (x);
  rtx dst = *loc;

  /* Some targets place small structures in registers for
     return values of functions.  */
  if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
    {
      int i;

      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        df_def_record_1 (df, XVECEXP (dst, 0, i), bb, insn);
      return;
    }

  /* Maybe we should flag the use of strict_low_part somehow.  Might be
     handy for the reg allocator.  */
#ifdef HANDLE_SUBREG
  while (GET_CODE (dst) == STRICT_LOW_PART
         || GET_CODE (dst) == ZERO_EXTRACT
         || GET_CODE (dst) == SIGN_EXTRACT)
    {
      loc = &XEXP (dst, 0);
      dst = *loc;
    }
  /* For the reg allocator we are interested in exact register references.
     This means, we want to know, if only a part of a register is
     used/defd.  */
/*
  if (GET_CODE (dst) == SUBREG)
    {
      loc = &XEXP (dst, 0);
      dst = *loc;
    } */
#else

  while (GET_CODE (dst) == SUBREG
         || GET_CODE (dst) == ZERO_EXTRACT
         || GET_CODE (dst) == SIGN_EXTRACT
         || GET_CODE (dst) == STRICT_LOW_PART)
    {
      loc = &XEXP (dst, 0);
      dst = *loc;
    }
#endif

  if (GET_CODE (dst) == REG
      || (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG))
    df_ref_record (df, dst, loc, bb, insn, DF_REF_REG_DEF);
}


/* Process all the registers defined in the pattern rtx, X.  */
static void
df_defs_record (df, x, bb, insn)
     struct df *df;
     rtx x;
     basic_block bb;
     rtx insn;
{
  RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    {
      /* Mark the single def within the pattern.  */
      df_def_record_1 (df, x, bb, insn);
    }
  else if (code == PARALLEL)
    {
      int i;

      /* Mark the multiple defs within the pattern.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          code = GET_CODE (XVECEXP (x, 0, i));
          if (code == SET || code == CLOBBER)
            df_def_record_1 (df, XVECEXP (x, 0, i), bb, insn);
        }
    }
}


/* Process all the registers used in the rtx at address LOC.  */
static void
df_uses_record (df, loc, ref_type, bb, insn)
     struct df *df;
     rtx *loc;
     enum df_ref_type ref_type;
     basic_block bb;
     rtx insn;
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        df_uses_record (df, &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE, bb, insn);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      df_uses_record (df, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
#if defined(HANDLE_SUBREG)

      /* In case the SUBREG is not of a register, don't optimize.  */
      if (GET_CODE (SUBREG_REG (x)) != REG)
        {
          loc = &SUBREG_REG (x);
          df_uses_record (df, loc, ref_type, bb, insn);
          return;
        }
#else
      loc = &SUBREG_REG (x);
      x = *loc;
      if (GET_CODE (x) != REG)
        {
          df_uses_record (df, loc, ref_type, bb, insn);
          return;
        }
#endif

      /* ... Fall through ...  */

    case REG:
      /* See a register (or subreg) other than being set.  */
      df_ref_record (df, x, loc, bb, insn, ref_type);
      return;

    case SET:
      {
        rtx dst = SET_DEST (x);
        int use_dst = 0;

        /* If storing into MEM, don't show it as being used.  But do
           show the address as being used.  */
        if (GET_CODE (dst) == MEM)
          {
            df_uses_record (df, &XEXP (dst, 0),
                            DF_REF_REG_MEM_STORE,
                            bb, insn);
            df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn);
            return;
          }

#if 1 && defined(HANDLE_SUBREG)
        /* Look for sets that perform a read-modify-write.  */
        while (GET_CODE (dst) == STRICT_LOW_PART
               || GET_CODE (dst) == ZERO_EXTRACT
               || GET_CODE (dst) == SIGN_EXTRACT)
          {
            if (GET_CODE (dst) == STRICT_LOW_PART)
              {
                dst = XEXP (dst, 0);
                if (GET_CODE (dst) != SUBREG)
                  abort ();
                /* A strict_low_part uses the whole reg not only the subreg.  */
                df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb, insn);
              }
            else
              {
                df_uses_record (df, &XEXP (dst, 0), DF_REF_REG_USE, bb, insn);
                dst = XEXP (dst, 0);
              }
          }
        if (GET_CODE (dst) == SUBREG)
          {
            /* Paradoxical or too-small SUBREGs are read-modify-write.  */
            if (GET_MODE_SIZE (GET_MODE (dst)) < GET_MODE_SIZE (word_mode)
                || GET_MODE_SIZE (GET_MODE (dst))
                   >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dst))))
              use_dst = 1;
          }
        /* In the original code also some SUBREG rtx's were considered
           read-modify-write (those with
           REG_SIZE(SUBREG_REG(dst)) > REG_SIZE(dst) )
           e.g. a (subreg:QI (reg:SI A) 0).  I can't see this.  The only
           reason for a read cycle for reg A would be to somehow preserve
           the bits outside of the subreg:QI.  But for this a strict_low_part
           was necessary anyway, and this we handled already.  */
#else
        while (GET_CODE (dst) == STRICT_LOW_PART
               || GET_CODE (dst) == ZERO_EXTRACT
               || GET_CODE (dst) == SIGN_EXTRACT
               || GET_CODE (dst) == SUBREG)
          {
            /* A SUBREG of a smaller size does not use the old value.  */
            if (GET_CODE (dst) != SUBREG
                || (REG_SIZE (SUBREG_REG (dst)) > REG_SIZE (dst)))
              use_dst = 1;
            dst = XEXP (dst, 0);
          }
#endif

        if ((GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
            || GET_CODE (dst) == REG || GET_CODE (dst) == SUBREG)
          {
#if 1 || !defined(HANDLE_SUBREG)
            if (use_dst)
              df_uses_record (df, &SET_DEST (x), DF_REF_REG_USE, bb, insn);
#endif
            df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn);
            return;
          }
      }
      break;

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be considered to use
           and clobber all hard registers, all pseudo-registers and all of
           memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu rounding
           mode.  An insn should not be moved across this even if it only uses
           pseudo-regs because it might give an incorrectly rounded result.

           For now, just mark any regs we can find in ASM_OPERANDS as
           used.  */

        /* For all ASM_OPERANDS, we must traverse the vector of input operands.
           We cannot just fall through here since then we would be confused
           by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
           traditional asms unlike their normal usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (df, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn);
          }
        break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      df_ref_record (df, XEXP (x, 0), &XEXP (x, 0), bb, insn, DF_REF_REG_DEF);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    register const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (df, &XEXP (x, i), ref_type, bb, insn);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (df, &XVECEXP (x, i, j), ref_type,
                              bb, insn);
          }
      }
  }
}


/* Record all the refs within INSN of basic block BB.  */
static void
df_insn_refs_record (df, bb, insn)
     struct df *df;
     basic_block bb;
     rtx insn;
{
  int i;

  if (INSN_P (insn))
    {
      /* Record register defs.  */
      df_defs_record (df, PATTERN (insn), bb, insn);

      if (GET_CODE (insn) == CALL_INSN)
        {
          rtx note;
          rtx x;

          /* Record the registers used to pass arguments.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
               note = XEXP (note, 1))
            {
              if (GET_CODE (XEXP (note, 0)) == USE)
                df_uses_record (df, &SET_DEST (XEXP (note, 0)), DF_REF_REG_USE,
                                bb, insn);
            }

          /* The stack ptr is used (honorarily) by a CALL insn.  */
          x = df_reg_use_gen (STACK_POINTER_REGNUM);
          df_uses_record (df, &SET_DEST (x), DF_REF_REG_USE, bb, insn);

          if (df->flags & DF_HARD_REGS)
            {
              /* Calls may also reference any of the global registers,
                 so they are recorded as used.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  {
                    x = df_reg_use_gen (i);
                    df_uses_record (df, &SET_DEST (x),
                                    DF_REF_REG_USE, bb, insn);
                  }
            }
        }

      /* Record the register uses.  */
      df_uses_record (df, &PATTERN (insn),
                      DF_REF_REG_USE, bb, insn);


      if (GET_CODE (insn) == CALL_INSN)
        {
          rtx note;

          if (df->flags & DF_HARD_REGS)
            {
              /* Kill all registers invalidated by a call.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
                  {
                    rtx reg_clob = df_reg_clobber_gen (i);
                    df_defs_record (df, reg_clob, bb, insn);
                  }
            }

          /* There may be extra registers to be clobbered.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn);
               note;
               note = XEXP (note, 1))
            if (GET_CODE (XEXP (note, 0)) == CLOBBER)
              df_defs_record (df, XEXP (note, 0), bb, insn);
        }
    }
}


/* Record all the refs within the basic block BB.  */
static void
df_bb_refs_record (df, bb)
     struct df *df;
     basic_block bb;
{
  rtx insn;

  /* Scan the block an insn at a time from beginning to end.  */
  for (insn = bb->head; ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          /* Record defs within INSN.  */
          df_insn_refs_record (df, bb, insn);
        }
      if (insn == bb->end)
        break;
    }
}


/* Record all the refs in the basic blocks specified by BLOCKS.  */
static void
df_refs_record (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_refs_record (df, bb);
    });
}
\f
/* Dataflow analysis routines.  */


/* Create reg-def chains for basic block BB.  These are a list of
   definitions for each register.  */
static void
df_bb_reg_def_chain_create (df, bb)
     struct df *df;
     basic_block bb;
{
  rtx insn;

  /* Perhaps the defs should be sorted using a depth first search
     of the CFG (or possibly a breadth first search).  We currently
     scan the basic blocks in reverse order so that the first defs
     appear at the start of the chain.  */

  for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
       insn = PREV_INSN (insn))
    {
      struct df_link *link;
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))
        continue;

      for (link = df->insns[uid].defs; link; link = link->next)
        {
          struct ref *def = link->ref;
          unsigned int dregno = DF_REF_REGNO (def);

          df->regs[dregno].defs
            = df_link_create (def, df->regs[dregno].defs);
        }
    }
}


/* Create reg-def chains for each basic block within BLOCKS.  These
   are a list of definitions for each register.  */
static void
df_reg_def_chain_create (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP/*_REV*/ (blocks, 0, bb,
    {
      df_bb_reg_def_chain_create (df, bb);
    });
}


/* Create reg-use chains for basic block BB.  These are a list of uses
   for each register.  */
static void
df_bb_reg_use_chain_create (df, bb)
     struct df *df;
     basic_block bb;
{
  rtx insn;

  /* Scan in forward order so that the last uses appear at the
     start of the chain.  */

  for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
       insn = NEXT_INSN (insn))
    {
      struct df_link *link;
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))
        continue;

      for (link = df->insns[uid].uses; link; link = link->next)
        {
          struct ref *use = link->ref;
          unsigned int uregno = DF_REF_REGNO (use);

          df->regs[uregno].uses
            = df_link_create (use, df->regs[uregno].uses);
        }
    }
}


/* Create reg-use chains for each basic block within BLOCKS.  These
   are a list of uses for each register.  */
static void
df_reg_use_chain_create (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_reg_use_chain_create (df, bb);
    });
}


/* Create def-use chains from reaching use bitmaps for basic block BB.  */
static void
df_bb_du_chain_create (df, bb, ru)
     struct df *df;
     basic_block bb;
     bitmap ru;
{
  struct bb_info *bb_info = DF_BB_INFO (df, bb);
  rtx insn;

  bitmap_copy (ru, bb_info->ru_out);

  /* For each def in BB create a linked list (chain) of uses
     reached from the def.  */
  for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
       insn = PREV_INSN (insn))
    {
      struct df_link *def_link;
      struct df_link *use_link;
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))
        continue;

      /* For each def in insn...  */
      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
        {
          struct ref *def = def_link->ref;
          unsigned int dregno = DF_REF_REGNO (def);

          DF_REF_CHAIN (def) = 0;

          /* While the reg-use chains are not essential, it
             is _much_ faster to search these short lists rather
             than all the reaching uses, especially for large functions.  */
          for (use_link = df->regs[dregno].uses; use_link;
               use_link = use_link->next)
            {
              struct ref *use = use_link->ref;

              if (bitmap_bit_p (ru, DF_REF_ID (use)))
                {
                  DF_REF_CHAIN (def)
                    = df_link_create (use, DF_REF_CHAIN (def));

                  bitmap_clear_bit (ru, DF_REF_ID (use));
                }
            }
        }

      /* For each use in insn...  */
      for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
        {
          struct ref *use = use_link->ref;
          bitmap_set_bit (ru, DF_REF_ID (use));
        }
    }
}


/* Create def-use chains from reaching use bitmaps for basic blocks
   in BLOCKS.  */
static void
df_du_chain_create (df, blocks)
     struct df *df;
     bitmap blocks;
{
  bitmap ru;
  basic_block bb;

  ru = BITMAP_XMALLOC ();

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_du_chain_create (df, bb, ru);
    });

  BITMAP_XFREE (ru);
}


/* Create use-def chains from reaching def bitmaps for basic block BB.  */
static void
df_bb_ud_chain_create (df, bb)
     struct df *df;
     basic_block bb;
{
  struct bb_info *bb_info = DF_BB_INFO (df, bb);
  struct ref **reg_def_last = df->reg_def_last;
  rtx insn;

  memset (reg_def_last, 0, df->n_regs * sizeof (struct ref *));

  /* For each use in BB create a linked list (chain) of defs
     that reach the use.  */
  for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
       insn = NEXT_INSN (insn))
    {
      unsigned int uid = INSN_UID (insn);
      struct df_link *use_link;
      struct df_link *def_link;

      if (! INSN_P (insn))
        continue;

      /* For each use in insn...  */
      for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
        {
          struct ref *use = use_link->ref;
          unsigned int regno = DF_REF_REGNO (use);

          DF_REF_CHAIN (use) = 0;

          /* Has regno been defined in this BB yet?  If so, use
             the last def as the single entry for the use-def
             chain for this use.  Otherwise, we need to add all
             the defs using this regno that reach the start of
             this BB.  */
          if (reg_def_last[regno])
            {
              DF_REF_CHAIN (use)
                = df_link_create (reg_def_last[regno], 0);
            }
          else
            {
              /* While the reg-def chains are not essential, it is
                 _much_ faster to search these short lists rather than
                 all the reaching defs, especially for large
                 functions.  */
              for (def_link = df->regs[regno].defs; def_link;
                   def_link = def_link->next)
                {
                  struct ref *def = def_link->ref;

                  if (bitmap_bit_p (bb_info->rd_in, DF_REF_ID (def)))
                    {
                      DF_REF_CHAIN (use)
                        = df_link_create (def, DF_REF_CHAIN (use));
                    }
                }
            }
        }


      /* For each def in insn...record the last def of each reg.  */
      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
        {
          struct ref *def = def_link->ref;
          int dregno = DF_REF_REGNO (def);

          reg_def_last[dregno] = def;
        }
    }
}


/* Create use-def chains from reaching def bitmaps for basic blocks
   within BLOCKS.  */
static void
df_ud_chain_create (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_ud_chain_create (df, bb);
    });
}
\f

/* Use depth first order, and the worklist, to figure out what block
   to look at next.  */

static int
df_visit_next (df, blocks)
     struct df *df;
     sbitmap blocks;
{
  int i;

  for (i = 0; i < n_basic_blocks; i++)
    if (TEST_BIT (blocks, df->rc_order[i]))
      return df->rc_order[i];
  return sbitmap_first_set_bit (blocks);
}

/* Calculate reaching defs for each basic block in BLOCKS, i.e., the
   defs that are live at the start of a basic block.  */
static void
df_rd_global_compute (df, blocks)
     struct df *df;
     bitmap blocks;
{
  int i;
  basic_block bb;
  sbitmap worklist;

  worklist = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (worklist);

  /* Copy the blocklist to the worklist.  */
  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
    {
      SET_BIT (worklist, i);
    });

  /* We assume that only the basic blocks in WORKLIST have been
     modified.  */
  FOR_EACH_BB_IN_SBITMAP (worklist, 0, bb,
    {
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      bitmap_copy (bb_info->rd_out, bb_info->rd_gen);
    });

  while ((i = df_visit_next (df, worklist)) >= 0)
    {
      struct bb_info *bb_info;
      edge e;
      int changed;

      /* Remove this block from the worklist.  */
      RESET_BIT (worklist, i);


      bb = BASIC_BLOCK (i);
      bb_info = DF_BB_INFO (df, bb);

      /* Calculate union of predecessor outs.  */
      bitmap_zero (bb_info->rd_in);
      for (e = bb->pred; e != 0; e = e->pred_next)
        {
          struct bb_info *pred_refs = DF_BB_INFO (df, e->src);

          if (e->src == ENTRY_BLOCK_PTR)
            continue;

          bitmap_a_or_b (bb_info->rd_in, bb_info->rd_in,
                         pred_refs->rd_out);
        }

      /* RD_OUT is the set of defs that are live at the end of the
         BB.  These are the defs that are either generated by defs
         (RD_GEN) within the BB or are live at the start (RD_IN)
         and are not killed by other defs (RD_KILL).  */
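      /* Equivalently: RD_OUT(bb) = RD_GEN(bb) U (RD_IN(bb) - RD_KILL(bb)).  */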
      changed = bitmap_union_of_diff (bb_info->rd_out, bb_info->rd_gen,
                                      bb_info->rd_in, bb_info->rd_kill);

      if (changed)
        {
          /* Add each of this block's successors to the worklist.  */
          for (e = bb->succ; e != 0; e = e->succ_next)
            {
              if (e->dest == EXIT_BLOCK_PTR)
                continue;

              SET_BIT (worklist, e->dest->index);
            }
        }
    }
  sbitmap_free (worklist);
}


/* Calculate reaching uses for each basic block within BLOCKS, i.e.,
   the uses that are live at the start of a basic block.  */
static void
df_ru_global_compute (df, blocks)
     struct df *df;
     bitmap blocks;
{
  int i;
  basic_block bb;
  sbitmap worklist;

  worklist = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (worklist);

  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
    {
      SET_BIT (worklist, i);
    });

  /* We assume that only the basic blocks in WORKLIST have been
     modified.  */
  FOR_EACH_BB_IN_SBITMAP (worklist, 0, bb,
    {
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      bitmap_copy (bb_info->ru_in, bb_info->ru_gen);
    });


  while ((i = df_visit_next (df, worklist)) >= 0)
    {
      struct bb_info *bb_info;
      edge e;
      int changed;

      /* Remove this block from the worklist.  */
      RESET_BIT (worklist, i);

      bb = BASIC_BLOCK (i);
      bb_info = DF_BB_INFO (df, bb);

      /* Calculate union of successor ins.  */
      bitmap_zero (bb_info->ru_out);
      for (e = bb->succ; e != 0; e = e->succ_next)
        {
          struct bb_info *succ_refs = DF_BB_INFO (df, e->dest);

          if (e->dest == EXIT_BLOCK_PTR)
            continue;

          bitmap_a_or_b (bb_info->ru_out, bb_info->ru_out,
                         succ_refs->ru_in);
        }

      /* RU_IN is the set of uses that are live at the start of the
         BB.  These are the uses that are either generated within the
         BB (RU_GEN) or are live at the end (RU_OUT) and are not uses
         killed by defs within the BB (RU_KILL).  */
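      /* Equivalently: RU_IN(bb) = RU_GEN(bb) U (RU_OUT(bb) - RU_KILL(bb)).  */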
      changed = bitmap_union_of_diff (bb_info->ru_in, bb_info->ru_gen,
                                      bb_info->ru_out, bb_info->ru_kill);

      if (changed)
        {
          /* Add each of this block's predecessors to the worklist.  */
          for (e = bb->pred; e != 0; e = e->pred_next)
            {
              if (e->src == ENTRY_BLOCK_PTR)
                continue;

              SET_BIT (worklist, e->src->index);
            }
        }
    }

  sbitmap_free (worklist);
}


/* Calculate live registers for each basic block within BLOCKS.  */
static void
df_lr_global_compute (df, blocks)
     struct df *df;
     bitmap blocks;
{
  int i;
  basic_block bb;
  bitmap worklist;

  worklist = BITMAP_XMALLOC ();
  bitmap_copy (worklist, blocks);

  /* We assume that only the basic blocks in WORKLIST have been
     modified.  */
  FOR_EACH_BB_IN_BITMAP (worklist, 0, bb,
    {
      struct bb_info *bb_info = DF_BB_INFO (df, bb);

      bitmap_copy (bb_info->lr_in, bb_info->lr_use);
    });

  while ((i = bitmap_last_set_bit (worklist)) >= 0)
    {
      struct bb_info *bb_info;
      edge e;
      int changed;

      /* Remove this block from the worklist.  */
      bitmap_clear_bit (worklist, i);

      bb = BASIC_BLOCK (i);
      bb_info = DF_BB_INFO (df, bb);

      /* Calculate union of successor ins.  */
      bitmap_zero (bb_info->lr_out);
      for (e = bb->succ; e != 0; e = e->succ_next)
        {
          struct bb_info *succ_refs = DF_BB_INFO (df, e->dest);

          if (e->dest == EXIT_BLOCK_PTR)
            continue;

          bitmap_a_or_b (bb_info->lr_out, bb_info->lr_out,
                         succ_refs->lr_in);
        }

      /* LR_IN is the set of uses that are live at the start of the
         BB.  These are the uses that are either generated by uses
         (LR_USE) within the BB or are live at the end (LR_OUT)
         and are not killed by defs within the BB (LR_DEF).  */
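      /* Equivalently: LR_IN(bb) = LR_USE(bb) U (LR_OUT(bb) - LR_DEF(bb)).  */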
      changed = bitmap_union_of_diff (bb_info->lr_in, bb_info->lr_use,
                                      bb_info->lr_out, bb_info->lr_def);

      if (changed)
        {
          /* Add each of this block's predecessors to the worklist.  */
          for (e = bb->pred; e != 0; e = e->pred_next)
            {
              if (e->src == ENTRY_BLOCK_PTR)
                continue;

              bitmap_set_bit (worklist, e->src->index);
            }
        }
    }
  BITMAP_XFREE (worklist);
}


/* Compute local reaching def info for basic block BB.  */
static void
df_bb_rd_local_compute (df, bb)
     struct df *df;
     basic_block bb;
{
  struct bb_info *bb_info = DF_BB_INFO (df, bb);
  rtx insn;

  for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
       insn = NEXT_INSN (insn))
    {
      unsigned int uid = INSN_UID (insn);
      struct df_link *def_link;

      if (! INSN_P (insn))
        continue;

      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
        {
          struct ref *def = def_link->ref;
          unsigned int regno = DF_REF_REGNO (def);
          struct df_link *def2_link;

          for (def2_link = df->regs[regno].defs; def2_link;
               def2_link = def2_link->next)
            {
              struct ref *def2 = def2_link->ref;

              /* Add all defs of this reg to the set of kills.  This
                 is greedy since many of these defs will not actually
                 be killed by this BB but it keeps things a lot
                 simpler.  */
              bitmap_set_bit (bb_info->rd_kill, DF_REF_ID (def2));

              /* Zap from the set of gens for this BB.  */
              bitmap_clear_bit (bb_info->rd_gen, DF_REF_ID (def2));
            }

          bitmap_set_bit (bb_info->rd_gen, DF_REF_ID (def));
        }
    }

  bb_info->rd_valid = 1;
}


/* Compute local reaching def info for each basic block within BLOCKS.  */
static void
df_rd_local_compute (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_rd_local_compute (df, bb);
    });
}


/* Compute local reaching use (upward exposed use) info for basic
   block BB.  */
static void
df_bb_ru_local_compute (df, bb)
     struct df *df;
     basic_block bb;
{
  /* This is much more tricky than computing reaching defs.  With
     reaching defs, defs get killed by other defs.  With upwards
     exposed uses, these get killed by defs with the same regno.  */

  struct bb_info *bb_info = DF_BB_INFO (df, bb);
  rtx insn;

  for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
       insn = PREV_INSN (insn))
    {
      unsigned int uid = INSN_UID (insn);
      struct df_link *def_link;
      struct df_link *use_link;

      if (! INSN_P (insn))
        continue;

      for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
        {
          struct ref *def = def_link->ref;
          unsigned int dregno = DF_REF_REGNO (def);

          for (use_link = df->regs[dregno].uses; use_link;
               use_link = use_link->next)
            {
              struct ref *use = use_link->ref;

              /* Add all uses of this reg to the set of kills.  This
                 is greedy since many of these uses will not actually
                 be killed by this BB but it keeps things a lot
                 simpler.  */
              bitmap_set_bit (bb_info->ru_kill, DF_REF_ID (use));

              /* Zap from the set of gens for this BB.  */
              bitmap_clear_bit (bb_info->ru_gen, DF_REF_ID (use));
            }
        }

      for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
        {
          struct ref *use = use_link->ref;
          /* Add use to set of gens in this BB.  */
          bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
        }
    }
  bb_info->ru_valid = 1;
}


/* Compute local reaching use (upward exposed use) info for each basic
   block within BLOCKS.  */
static void
df_ru_local_compute (df, blocks)
     struct df *df;
     bitmap blocks;
{
  basic_block bb;

  FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
    {
      df_bb_ru_local_compute (df, bb);
    });
}
1975
1976
1977/* Compute local live variable info for basic block BB. */
1978static void
1979df_bb_lr_local_compute (df, bb)
1980 struct df *df;
1981 basic_block bb;
1982{
1983 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1984 rtx insn;
1985
1986 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1987 insn = PREV_INSN (insn))
1988 {
1989 unsigned int uid = INSN_UID (insn);
1990 struct df_link *link;
1991
1992 if (! INSN_P (insn))
1993 continue;
1994
1995 for (link = df->insns[uid].defs; link; link = link->next)
1996 {
1997 struct ref *def = link->ref;
1998 unsigned int dregno = DF_REF_REGNO (def);
1999
2000 /* Add def to set of defs in this BB. */
2001 bitmap_set_bit (bb_info->lr_def, dregno);
2002
2003 bitmap_clear_bit (bb_info->lr_use, dregno);
2004 }
2005
2006 for (link = df->insns[uid].uses; link; link = link->next)
2007 {
2008 struct ref *use = link->ref;
2009 /* Add use to set of uses in this BB. */
2010 bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
2011 }
2012 }
2013 bb_info->lr_valid = 1;
2014}
2015
2016
2017/* Compute local live variable info for each basic block within BLOCKS. */
2018static void
2019df_lr_local_compute (df, blocks)
2020 struct df *df;
2021 bitmap blocks;
2022{
2023 basic_block bb;
2024
2025 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2026 {
2027 df_bb_lr_local_compute (df, bb);
2028 });
2029}
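
/* lr_use and lr_def feed the standard backwards live-variable
   equations solved by df_lr_global_compute:

      lr_out (BB) = union of lr_in (S) over each successor S
      lr_in (BB)  = lr_use (BB) | (lr_out (BB) & ~lr_def (BB))

   For example, a block containing r3 = r1 + r2; r1 = r3 ends up with
   lr_use = {r1, r2} and lr_def = {r1, r3}: the backwards scan above
   clears a reg from lr_use once a def of it is seen, so lr_use holds
   only the upward exposed uses.  */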
2030
2031
2032/* Compute register info: lifetime, bb, and number of defs and uses
2033 for basic block BB. */
2034static void
2035df_bb_reg_info_compute (df, bb, live)
2036 struct df *df;
2037 basic_block bb;
2038 bitmap live;
2039{
2040 struct reg_info *reg_info = df->regs;
2041 struct bb_info *bb_info = DF_BB_INFO (df, bb);
2042 rtx insn;
2043
2044 bitmap_copy (live, bb_info->lr_out);
2045
2046 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
2047 insn = PREV_INSN (insn))
2048 {
2049 unsigned int uid = INSN_UID (insn);
2050 unsigned int regno;
2051 struct df_link *link;
2052
2053 if (! INSN_P (insn))
2054 continue;
2055
2056 for (link = df->insns[uid].defs; link; link = link->next)
2057 {
2058 struct ref *def = link->ref;
2059 unsigned int dregno = DF_REF_REGNO (def);
2060
2061 /* Kill this register. */
2062 bitmap_clear_bit (live, dregno);
2063 reg_info[dregno].n_defs++;
2064 }
2065
2066 for (link = df->insns[uid].uses; link; link = link->next)
2067 {
2068 struct ref *use = link->ref;
2069 unsigned int uregno = DF_REF_REGNO (use);
2070
2071 /* This register is now live. */
2072 bitmap_set_bit (live, uregno);
2073 reg_info[uregno].n_uses++;
2074 }
2075
2076 /* Increment lifetimes of all live registers. */
2077 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno,
2078 {
2079 reg_info[regno].lifetime++;
2080 });
2081 }
2082}
2083
2084
2085/* Compute register info: lifetime, bb, and number of defs and uses. */
2086static void
2087df_reg_info_compute (df, blocks)
2088 struct df *df;
2089 bitmap blocks;
2090{
2091 basic_block bb;
2092 bitmap live;
2093
2094 live = BITMAP_XMALLOC ();
2095
2096 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2097 {
2098 df_bb_reg_info_compute (df, bb, live);
2099 });
2100
2101 BITMAP_XFREE (live);
2102}
2103
2104
2105/* Assign LUIDs for BB. */
2106static int
2107df_bb_luids_set (df, bb)
2108 struct df *df;
2109 basic_block bb;
2110{
2111 rtx insn;
2112 int luid = 0;
2113
2114 /* The LUIDs are monotonically increasing for each basic block. */
2115
2116 for (insn = bb->head; ; insn = NEXT_INSN (insn))
2117 {
2118	      if (INSN_P (insn))
2119		DF_INSN_LUID (df, insn) = luid++;
2120	      else DF_INSN_LUID (df, insn) = luid;
2121
2122 if (insn == bb->end)
2123 break;
2124 }
2125 return luid;
2126}
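
/* A sketch of how the LUIDs are consumed (illustrative only, not
   called anywhere): they give a cheap intra-block ordering test
   between two insns in the same basic block, as used by
   df_bb_regs_lives_compare below.  */
#if 0
static int
example_insn_precedes_p (df, insn1, insn2)
     struct df *df;
     rtx insn1;
     rtx insn2;
{
  /* Both insns must satisfy INSN_P and lie within the same block.  */
  return DF_INSN_LUID (df, insn1) < DF_INSN_LUID (df, insn2);
}
#endif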
2127
2128
2129/* Assign LUIDs for each basic block within BLOCKS. */
2130static int
2131df_luids_set (df, blocks)
2132 struct df *df;
2133 bitmap blocks;
2134{
2135 basic_block bb;
2136 int total = 0;
2137
2138 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2139 {
2140 total += df_bb_luids_set (df, bb);
2141 });
2142 return total;
2143}
2144
2145
2146/* Perform dataflow analysis using existing DF structure for blocks
2147 within BLOCKS. If BLOCKS is zero, use all basic blocks in the CFG. */
2148static void
2149df_analyse_1 (df, blocks, flags, update)
2150 struct df *df;
2151 bitmap blocks;
2152 int flags;
2153 int update;
2154{
2155 int aflags;
2156 int dflags;
2157
2158 dflags = 0;
2159 aflags = flags;
2160 if (flags & DF_UD_CHAIN)
2161 aflags |= DF_RD | DF_RD_CHAIN;
2162
2163 if (flags & DF_DU_CHAIN)
2164 aflags |= DF_RU;
2165
2166 if (flags & DF_RU)
2167 aflags |= DF_RU_CHAIN;
2168
2169 if (flags & DF_REG_INFO)
2170 aflags |= DF_LR;
2171
2172 if (! blocks)
2173 blocks = df->all_blocks;
2174
2175 df->flags = flags;
2176 if (update)
2177 {
2178 df_refs_update (df);
2179 /* More fine grained incremental dataflow analysis would be
2180 nice. For now recompute the whole shebang for the
2181 modified blocks. */
2182#if 0
2183 df_refs_unlink (df, blocks);
2184#endif
2185 /* All the def-use, use-def chains can be potentially
2186 modified by changes in one block. The size of the
2187 bitmaps can also change. */
2188 }
2189 else
2190 {
2191 /* Scan the function for all register defs and uses. */
2192 df_refs_queue (df);
2193 df_refs_record (df, blocks);
2194
2195 /* Link all the new defs and uses to the insns. */
2196 df_refs_process (df);
2197 }
2198
2199	  /* Allocate the bitmaps now that the total number of defs and uses
2200	     is known.  If the number of defs or uses has changed, then
2201	     these bitmaps need to be reallocated.  */
2202 df_bitmaps_alloc (df, aflags);
2203
2204 /* Set the LUIDs for each specified basic block. */
2205 df_luids_set (df, blocks);
2206
2207 /* Recreate reg-def and reg-use chains from scratch so that first
2208 def is at the head of the reg-def chain and the last use is at
2209 the head of the reg-use chain. This is only important for
2210 regs local to a basic block as it speeds up searching. */
2211 if (aflags & DF_RD_CHAIN)
2212 {
2213 df_reg_def_chain_create (df, blocks);
2214 }
2215
2216 if (aflags & DF_RU_CHAIN)
2217 {
2218 df_reg_use_chain_create (df, blocks);
2219 }
2220
2221 df->dfs_order = xmalloc (sizeof(int) * n_basic_blocks);
2222 df->rc_order = xmalloc (sizeof(int) * n_basic_blocks);
2223
2224 flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2225
2226 if (aflags & DF_RD)
2227 {
2228 /* Compute the sets of gens and kills for the defs of each bb. */
2229 df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
2230
2231 /* Compute the global reaching definitions. */
2232 df_rd_global_compute (df, df->all_blocks);
2233 }
2234
2235 if (aflags & DF_UD_CHAIN)
2236 {
2237 /* Create use-def chains. */
2238 df_ud_chain_create (df, df->all_blocks);
2239
2240 if (! (flags & DF_RD))
2241 dflags |= DF_RD;
2242 }
2243
2244 if (aflags & DF_RU)
2245 {
2246 /* Compute the sets of gens and kills for the upwards exposed
2247 uses in each bb. */
2248 df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
2249
2250 /* Compute the global reaching uses. */
2251 df_ru_global_compute (df, df->all_blocks);
2252 }
2253
2254 if (aflags & DF_DU_CHAIN)
2255 {
2256 /* Create def-use chains. */
2257 df_du_chain_create (df, df->all_blocks);
2258
2259 if (! (flags & DF_RU))
2260 dflags |= DF_RU;
2261 }
2262
2263 /* Free up bitmaps that are no longer required. */
2264 if (dflags)
2265 df_bitmaps_free (df, dflags);
2266
2267 if (aflags & DF_LR)
2268 {
2269 /* Compute the sets of defs and uses of live variables. */
2270 df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
2271
2272 /* Compute the global live variables. */
2273 df_lr_global_compute (df, df->all_blocks);
2274 }
2275
2276 if (aflags & DF_REG_INFO)
2277 {
2278 df_reg_info_compute (df, df->all_blocks);
2279 }
2280 free (df->dfs_order);
2281 free (df->rc_order);
2282}
2283
2284
2285/* Initialise dataflow analysis. */
2286struct df *
2287df_init ()
2288{
2289 struct df *df;
2290
2291 df = xcalloc (1, sizeof (struct df));
2292
2293 /* Squirrel away a global for debugging. */
2294 ddf = df;
2295
2296 return df;
2297}
2298
2299
2300/* Start queuing refs. */
2301static int
2302df_refs_queue (df)
2303 struct df *df;
2304{
2305 df->def_id_save = df->def_id;
2306 df->use_id_save = df->use_id;
2307 /* ???? Perhaps we should save current obstack state so that we can
2308 unwind it. */
2309 return 0;
2310}
2311
2312
2313/* Process queued refs. */
2314static int
2315df_refs_process (df)
2316 struct df *df;
2317{
2318 unsigned int i;
2319
2320 /* Build new insn-def chains. */
2321 for (i = df->def_id_save; i != df->def_id; i++)
2322 {
2323 struct ref *def = df->defs[i];
2324 unsigned int uid = DF_REF_INSN_UID (def);
2325
2326 /* Add def to head of def list for INSN. */
2327 df->insns[uid].defs
2328 = df_link_create (def, df->insns[uid].defs);
2329 }
2330
2331 /* Build new insn-use chains. */
2332 for (i = df->use_id_save; i != df->use_id; i++)
2333 {
2334 struct ref *use = df->uses[i];
2335 unsigned int uid = DF_REF_INSN_UID (use);
2336
2337 /* Add use to head of use list for INSN. */
2338 df->insns[uid].uses
2339 = df_link_create (use, df->insns[uid].uses);
2340 }
2341 return 0;
2342}
2343
2344
2345/* Update refs for basic block BB. */
2346static int
2347df_bb_refs_update (df, bb)
2348 struct df *df;
2349 basic_block bb;
2350{
2351 rtx insn;
2352 int count = 0;
2353
2354 /* While we have to scan the chain of insns for this BB, we don't
2355 need to allocate and queue a long chain of BB/INSN pairs. Using
2356 a bitmap for insns_modified saves memory and avoids queuing
2357 duplicates. */
2358
2359 for (insn = bb->head; ; insn = NEXT_INSN (insn))
2360 {
2361 unsigned int uid;
2362
2363 uid = INSN_UID (insn);
2364
2365 if (bitmap_bit_p (df->insns_modified, uid))
2366 {
2367 /* Delete any allocated refs of this insn. MPH, FIXME. */
2368 df_insn_refs_unlink (df, bb, insn);
2369
2370 /* Scan the insn for refs. */
2371 df_insn_refs_record (df, bb, insn);
2372
2373
2374 bitmap_clear_bit (df->insns_modified, uid);
2375 count++;
2376 }
2377 if (insn == bb->end)
2378 break;
2379 }
2380 return count;
2381}
2382
2383
2384/* Process all the modified/deleted insns that were queued. */
2385static int
2386df_refs_update (df)
2387 struct df *df;
2388{
2389 basic_block bb;
2390 int count = 0;
2391
2392 if ((unsigned int)max_reg_num () >= df->reg_size)
2393 df_reg_table_realloc (df, 0);
2394
2395 df_refs_queue (df);
2396
2397 FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
2398 {
2399 count += df_bb_refs_update (df, bb);
2400 });
2401
2402 df_refs_process (df);
2403 return count;
2404}
2405
2406
2407/* Return non-zero if any of the requested blocks in the bitmap
2408 BLOCKS have been modified. */
2409static int
2410df_modified_p (df, blocks)
2411 struct df *df;
2412 bitmap blocks;
2413{
2414 unsigned int j;
2415 int update = 0;
2416
2417 for (j = 0; j < df->n_bbs; j++)
2418 if (bitmap_bit_p (df->bbs_modified, j)
2419 && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, j)))
2420 {
2421 update = 1;
2422 break;
2423 }
2424
2425 return update;
2426}
2427
2428
2429/* Analyse dataflow info for the basic blocks specified by the bitmap
2430 BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
2431 modified blocks if BLOCKS is -1. */
2432int
2433df_analyse (df, blocks, flags)
2434 struct df *df;
2435 bitmap blocks;
2436 int flags;
2437{
2438 int update;
2439
2440 /* We could deal with additional basic blocks being created by
2441 rescanning everything again. */
2442 if (df->n_bbs && df->n_bbs != (unsigned int)n_basic_blocks)
2443 abort ();
2444
2445 update = df_modified_p (df, blocks);
2446 if (update || (flags != df->flags))
2447 {
2448 if (! blocks)
2449 {
2450 if (df->n_bbs)
2451 {
2452 /* Recompute everything from scratch. */
2453 df_free (df);
2454 }
2455 /* Allocate and initialise data structures. */
2456 df_alloc (df, max_reg_num ());
2457 df_analyse_1 (df, 0, flags, 0);
2458 update = 1;
2459 }
2460 else
2461 {
2462 if (blocks == (bitmap) -1)
2463 blocks = df->bbs_modified;
2464
2465 if (! df->n_bbs)
2466 abort ();
2467
2468 df_analyse_1 (df, blocks, flags, 1);
2469 bitmap_zero (df->bbs_modified);
2470 }
2471 }
2472 return update;
2473}
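
/* A sketch of the incremental interface (illustrative only, not
   called anywhere): once insns have been queued with df_insn_modify
   or df_insn_delete, passing -1 for BLOCKS reanalyses just the
   modified blocks using the previously requested flags.  */
#if 0
static void
example_reanalyse_modified (df)
     struct df *df;
{
  df_analyse (df, (bitmap) -1, df->flags);
}
#endif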
2474
2475
2476/* Free all the dataflow info and the DF structure. */
2477void
2478df_finish (df)
2479 struct df *df;
2480{
2481 df_free (df);
2482 free (df);
2483}
2484
2485
2486/* Unlink INSN from its reference information. */
2487static void
2488df_insn_refs_unlink (df, bb, insn)
2489 struct df *df;
2490 basic_block bb ATTRIBUTE_UNUSED;
2491 rtx insn;
2492{
2493 struct df_link *link;
2494 unsigned int uid;
2495
2496 uid = INSN_UID (insn);
2497
2498 /* Unlink all refs defined by this insn. */
2499 for (link = df->insns[uid].defs; link; link = link->next)
2500 df_def_unlink (df, link->ref);
2501
2502 /* Unlink all refs used by this insn. */
2503 for (link = df->insns[uid].uses; link; link = link->next)
2504 df_use_unlink (df, link->ref);
2505
2506 df->insns[uid].defs = 0;
2507 df->insns[uid].uses = 0;
2508}
2509
2510
2511#if 0
2512/* Unlink all the insns within BB from their reference information. */
2513static void
2514df_bb_refs_unlink (df, bb)
2515 struct df *df;
2516 basic_block bb;
2517{
2518 rtx insn;
2519
2520 /* Scan the block an insn at a time from beginning to end. */
2521 for (insn = bb->head; ; insn = NEXT_INSN (insn))
2522 {
2523 if (INSN_P (insn))
2524 {
2525 /* Unlink refs for INSN. */
2526 df_insn_refs_unlink (df, bb, insn);
2527 }
2528 if (insn == bb->end)
2529 break;
2530 }
2531}
2532
2533
2534/* Unlink all the refs in the basic blocks specified by BLOCKS.
2535 Not currently used. */
2536static void
2537df_refs_unlink (df, blocks)
2538 struct df *df;
2539 bitmap blocks;
2540{
2541 basic_block bb;
2542
2543 if (blocks)
2544 {
2545 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2546 {
2547 df_bb_refs_unlink (df, bb);
2548 });
2549 }
2550 else
2551 {
2552 FOR_ALL_BBS (bb,
2553 {
2554 df_bb_refs_unlink (df, bb);
2555 });
2556 }
2557}
2558#endif
2559\f
2560/* Functions to modify insns. */
2561
2562
2563/* Delete INSN and all its reference information. */
2564rtx
2565df_insn_delete (df, bb, insn)
2566 struct df *df;
2567 basic_block bb ATTRIBUTE_UNUSED;
2568 rtx insn;
2569{
2570 /* If the insn is a jump, we should perhaps call delete_insn to
2571 handle the JUMP_LABEL? */
2572
2573 /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label. */
2574 if (insn == bb->head)
2575 abort ();
2576 if (insn == bb->end)
2577 bb->end = PREV_INSN (insn);
2578
2579 /* Delete the insn. */
2580 PUT_CODE (insn, NOTE);
2581 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2582 NOTE_SOURCE_FILE (insn) = 0;
2583
2584 df_insn_modify (df, bb, insn);
2585
2586 return NEXT_INSN (insn);
2587}
2588
2589
2590/* Mark that INSN within BB may have changed (created/modified/deleted).
2591 This may be called multiple times for the same insn. There is no
2592 harm calling this function if the insn wasn't changed; it will just
2593 slow down the rescanning of refs. */
2594void
2595df_insn_modify (df, bb, insn)
2596 struct df *df;
2597 basic_block bb;
2598 rtx insn;
2599{
2600 unsigned int uid;
2601
2602 uid = INSN_UID (insn);
2603
2604 bitmap_set_bit (df->bbs_modified, bb->index);
2605 bitmap_set_bit (df->insns_modified, uid);
2606
2607#if 0
2608 /* For incremental updating on the fly, perhaps we could make a copy
2609 of all the refs of the original insn and turn them into
2610 anti-refs. When df_refs_update finds these anti-refs, it annihilates
2611 the original refs. If validate_change fails then these anti-refs
2612 will just get ignored. */
2613
2614#endif
2615}
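
/* A sketch of typical client usage (illustrative only, not called
   anywhere): change an insn with validate_change and then tell the
   dataflow code about it; the rescan is deferred until the next
   df_analyse.  */
#if 0
static void
example_change_insn (df, bb, insn, loc, new_rtx)
     struct df *df;
     basic_block bb;
     rtx insn;
     rtx *loc;
     rtx new_rtx;
{
  if (validate_change (insn, loc, new_rtx, 0))
    df_insn_modify (df, bb, insn);
}
#endif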
2616
2617
2618typedef struct replace_args
2619{
2620 rtx match;
2621 rtx replacement;
2622 rtx insn;
2623 int modified;
2624} replace_args;
2625
2626
2627/* Replace mem pointed to by PX with its associated pseudo register.
2628 DATA is actually a pointer to a structure describing the
2629 instruction currently being scanned and the MEM we are currently
2630 replacing. */
2631static int
2632df_rtx_mem_replace (px, data)
2633 rtx *px;
2634 void *data;
2635{
2636 replace_args *args = (replace_args *) data;
2637 rtx mem = *px;
2638
2639 if (mem == NULL_RTX)
2640 return 0;
2641
2642 switch (GET_CODE (mem))
2643 {
2644 case MEM:
2645 break;
2646
2647 case CONST_DOUBLE:
2648 /* We're not interested in the MEM associated with a
2649 CONST_DOUBLE, so there's no need to traverse into one. */
2650 return -1;
2651
2652 default:
2653 /* This is not a MEM. */
2654 return 0;
2655 }
2656
2657 if (!rtx_equal_p (args->match, mem))
2658 /* This is not the MEM we are currently replacing. */
2659 return 0;
2660
2661 /* Actually replace the MEM. */
2662 validate_change (args->insn, px, args->replacement, 1);
2663 args->modified++;
2664
2665 return 0;
2666}
2667
2668
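/* Replace all occurrences of MEM within INSN (which lies in BB) with
   REG, marking the insn modified if any replacement is made.  Returns
   the number of replacements.  */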
2669int
2670df_insn_mem_replace (df, bb, insn, mem, reg)
2671 struct df *df;
2672 basic_block bb;
2673 rtx insn;
2674 rtx mem;
2675 rtx reg;
2676{
2677 replace_args args;
2678
2679 args.insn = insn;
2680 args.match = mem;
2681 args.replacement = reg;
2682 args.modified = 0;
2683
2684	  /* Search and replace all matching MEMs within INSN.  */
2685 for_each_rtx (&insn, df_rtx_mem_replace, &args);
2686
2687 if (args.modified)
2688 df_insn_modify (df, bb, insn);
2689
2690 /* ???? FIXME. We may have a new def or one or more new uses of REG
2691 in INSN. REG should be a new pseudo so it won't affect the
2692 dataflow information that we currently have. We should add
2693 the new uses and defs to INSN and then recreate the chains
2694 when df_analyse is called. */
2695 return args.modified;
2696}
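
/* A sketch of typical usage (illustrative only, not called anywhere):
   replace a MEM with a fresh pseudo of the same mode, as a
   load-hoisting transformation might, then commit the grouped
   changes.  */
#if 0
static void
example_replace_mem (df, bb, insn, mem)
     struct df *df;
     basic_block bb;
     rtx insn;
     rtx mem;
{
  rtx reg = gen_reg_rtx (GET_MODE (mem));

  if (df_insn_mem_replace (df, bb, insn, mem, reg))
    apply_change_group ();
}
#endif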
2697
2698
2699/* Replace one register with another. Called through for_each_rtx; PX
2700 points to the rtx being scanned. DATA is actually a pointer to a
2701 structure of arguments. */
2702static int
2703df_rtx_reg_replace (px, data)
2704 rtx *px;
2705 void *data;
2706{
2707 rtx x = *px;
2708 replace_args *args = (replace_args *) data;
2709
2710 if (x == NULL_RTX)
2711 return 0;
2712
2713 if (x == args->match)
2714 {
2715 validate_change (args->insn, px, args->replacement, 1);
2716 args->modified++;
2717 }
2718
2719 return 0;
2720}
2721
2722
2723/* Replace the reg within every ref on CHAIN that is within the set
2724 BLOCKS of basic blocks with NEWREG. Also update the regs within
2725 REG_NOTES. */
2726void
2727df_refs_reg_replace (df, blocks, chain, oldreg, newreg)
2728 struct df *df;
2729 bitmap blocks;
2730 struct df_link *chain;
2731 rtx oldreg;
2732 rtx newreg;
2733{
2734 struct df_link *link;
2735 replace_args args;
2736
2737 if (! blocks)
2738 blocks = df->all_blocks;
2739
2740 args.match = oldreg;
2741 args.replacement = newreg;
2742 args.modified = 0;
2743
2744 for (link = chain; link; link = link->next)
2745 {
2746 struct ref *ref = link->ref;
2747 rtx insn = DF_REF_INSN (ref);
2748
2749 if (! INSN_P (insn))
2750 continue;
2751
2752 if (bitmap_bit_p (blocks, DF_REF_BBNO (ref)))
2753 {
2754 df_ref_reg_replace (df, ref, oldreg, newreg);
2755
2756 /* Replace occurrences of the reg within the REG_NOTES. */
2757 if ((! link->next || DF_REF_INSN (ref)
2758 != DF_REF_INSN (link->next->ref))
2759 && REG_NOTES (insn))
2760 {
2761 args.insn = insn;
2762 for_each_rtx (&REG_NOTES (insn), df_rtx_reg_replace, &args);
2763 }
2764 }
2765 else
2766 {
2767 /* Temporary check to ensure that we have a grip on which
2768 regs should be replaced. */
2769 abort ();
2770 }
2771 }
2772}
2773
2774
2775/* Replace all occurrences of register OLDREG with register NEWREG in
2776 blocks defined by bitmap BLOCKS. This also replaces occurrences of
2777 OLDREG in the REG_NOTES but only for insns containing OLDREG. This
2778 routine expects the reg-use and reg-def chains to be valid. */
2779int
2780df_reg_replace (df, blocks, oldreg, newreg)
2781 struct df *df;
2782 bitmap blocks;
2783 rtx oldreg;
2784 rtx newreg;
2785{
2786 unsigned int oldregno = REGNO (oldreg);
2787
2788 df_refs_reg_replace (df, blocks, df->regs[oldregno].defs, oldreg, newreg);
2789 df_refs_reg_replace (df, blocks, df->regs[oldregno].uses, oldreg, newreg);
2790 return 1;
2791}
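
/* A sketch of typical usage (illustrative only, not called anywhere):
   rename a pseudo throughout a set of blocks.  This requires valid
   reg-def and reg-use chains, i.e., a prior df_analyse with
   DF_RD_CHAIN | DF_RU_CHAIN.  */
#if 0
static rtx
example_rename_reg (df, blocks, oldreg)
     struct df *df;
     bitmap blocks;
     rtx oldreg;
{
  rtx newreg = gen_reg_rtx (GET_MODE (oldreg));

  df_reg_replace (df, blocks, oldreg, newreg);
  apply_change_group ();
  return newreg;
}
#endif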
2792
2793
2794/* Try replacing the reg within REF with NEWREG. Do not modify
2795 def-use/use-def chains. */
2796int
2797df_ref_reg_replace (df, ref, oldreg, newreg)
2798 struct df *df;
2799 struct ref *ref;
2800 rtx oldreg;
2801 rtx newreg;
2802{
2803	  /* If the insn was deleted, i.e., converted into a NOTE, then
2804	     ignore it.  */
2805 if (! INSN_P (DF_REF_INSN (ref)))
2806 return 0;
2807
2808 if (oldreg && oldreg != DF_REF_REG (ref))
2809 abort ();
2810
2811 if (! validate_change (DF_REF_INSN (ref), DF_REF_LOC (ref), newreg, 1))
2812 return 0;
2813
2814 df_insn_modify (df, DF_REF_BB (ref), DF_REF_INSN (ref));
2815 return 1;
2816}
2817
2818
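/* Move the def of REGNO in DEF_INSN over to USE_INSN, removing the
   last use of REGNO in USE_INSN.  Returns the moved def, or zero if
   either ref cannot be found.  */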
2819struct ref*
2820df_bb_def_use_swap (df, bb, def_insn, use_insn, regno)
2821 struct df * df;
2822 basic_block bb;
2823 rtx def_insn;
2824 rtx use_insn;
2825 unsigned int regno;
2826{
2827 struct ref *def;
2828 struct ref *use;
2829 int def_uid;
2830 int use_uid;
2831 struct df_link *link;
2832
2833 def = df_bb_insn_regno_first_def_find (df, bb, def_insn, regno);
2834 if (! def)
2835 return 0;
2836
2837 use = df_bb_insn_regno_last_use_find (df, bb, use_insn, regno);
2838 if (! use)
2839 return 0;
2840
2841 /* The USE no longer exists. */
2842 use_uid = INSN_UID (use_insn);
2843 df_use_unlink (df, use);
2844 df_ref_unlink (&df->insns[use_uid].uses, use);
2845
2846 /* The DEF requires shifting so remove it from DEF_INSN
2847 and add it to USE_INSN by reusing LINK. */
2848 def_uid = INSN_UID (def_insn);
2849 link = df_ref_unlink (&df->insns[def_uid].defs, def);
2850 link->ref = def;
2851 link->next = df->insns[use_uid].defs;
2852 df->insns[use_uid].defs = link;
2853
2854#if 0
2855 link = df_ref_unlink (&df->regs[regno].defs, def);
2856 link->ref = def;
2857 link->next = df->regs[regno].defs;
2858	  df->regs[regno].defs = link;
2859#endif
2860
2861 DF_REF_INSN (def) = use_insn;
2862 return def;
2863}
2864
2865
2866/* Mark all the insns from FIRST_INSN to LAST_INSN inclusive as
2867   modified.  All new insns must be processed by this routine.  */
2868static void
2869df_insns_modify (df, bb, first_insn, last_insn)
2870 struct df *df;
2871 basic_block bb;
2872 rtx first_insn;
2873 rtx last_insn;
2874{
2875 rtx insn;
2876
2877 for (insn = first_insn; ; insn = NEXT_INSN (insn))
2878 {
2879 unsigned int uid;
2880
2881 /* A non-const call should not have slipped through the net. If
2882 it does, we need to create a new basic block. Ouch. The
2883 same applies for a label. */
2884 if ((GET_CODE (insn) == CALL_INSN
2885	   && ! CONST_OR_PURE_CALL_P (insn))
2886 || GET_CODE (insn) == CODE_LABEL)
2887 abort ();
2888
2889 uid = INSN_UID (insn);
2890
2891 if (uid >= df->insn_size)
2892 df_insn_table_realloc (df, 0);
2893
2894 df_insn_modify (df, bb, insn);
2895
2896 if (insn == last_insn)
2897 break;
2898 }
2899}
2900
2901
2902/* Emit PATTERN before INSN within BB. */
2903rtx
2904df_pattern_emit_before (df, pattern, bb, insn)
2905 struct df *df ATTRIBUTE_UNUSED;
2906 rtx pattern;
2907 basic_block bb;
2908 rtx insn;
2909{
2910 rtx ret_insn;
2911 rtx prev_insn = PREV_INSN (insn);
2912
2913 /* We should not be inserting before the start of the block. */
2914 if (insn == bb->head)
2915 abort ();
2916 ret_insn = emit_insn_before (pattern, insn);
2917 if (ret_insn == insn)
2918 return ret_insn;
2919
2920 df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
2921 return ret_insn;
2922}
2923
2924
2925/* Emit PATTERN after INSN within BB. */
2926rtx
2927df_pattern_emit_after (df, pattern, bb, insn)
2928 struct df *df;
2929 rtx pattern;
2930 basic_block bb;
2931 rtx insn;
2932{
2933 rtx ret_insn;
2934
2935 ret_insn = emit_insn_after (pattern, insn);
2936 if (ret_insn == insn)
2937 return ret_insn;
2938
2939 if (bb->end == insn)
2940 bb->end = ret_insn;
2941
2942 df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2943 return ret_insn;
2944}
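
/* A sketch of typical usage (illustrative only, not called anywhere):
   emit a register copy after INSN, leaving the new insn queued for
   rescanning by the next df_analyse.  */
#if 0
static rtx
example_emit_copy_after (df, bb, insn, dest, src)
     struct df *df;
     basic_block bb;
     rtx insn;
     rtx dest;
     rtx src;
{
  return df_pattern_emit_after (df, gen_move_insn (dest, src), bb, insn);
}
#endif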
2945
2946
2947/* Emit jump PATTERN after INSN within BB. */
2948rtx
2949df_jump_pattern_emit_after (df, pattern, bb, insn)
2950 struct df *df;
2951 rtx pattern;
2952 basic_block bb;
2953 rtx insn;
2954{
2955 rtx ret_insn;
2956
2957 ret_insn = emit_jump_insn_after (pattern, insn);
2958 if (ret_insn == insn)
2959 return ret_insn;
2960
2961 if (bb->end == insn)
2962 bb->end = ret_insn;
2963
2964 df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2965 return ret_insn;
2966}
2967
2968
2969/* Move INSN within BB before BEFORE_INSN within BEFORE_BB.
2970
2971 This function should only be used to move loop invariant insns
2972 out of a loop where it has been proven that the def-use info
2973 will still be valid. */
2974rtx
2975df_insn_move_before (df, bb, insn, before_bb, before_insn)
2976 struct df *df;
2977 basic_block bb;
2978 rtx insn;
2979 basic_block before_bb;
2980 rtx before_insn;
2981{
2982 struct df_link *link;
2983 unsigned int uid;
2984
2985 if (! bb)
2986 return df_pattern_emit_before (df, insn, before_bb, before_insn);
2987
2988 uid = INSN_UID (insn);
2989
2990 /* Change bb for all df defined and used by this insn. */
2991 for (link = df->insns[uid].defs; link; link = link->next)
2992 DF_REF_BB (link->ref) = before_bb;
2993 for (link = df->insns[uid].uses; link; link = link->next)
2994 DF_REF_BB (link->ref) = before_bb;
2995
2996 /* The lifetimes of the registers used in this insn will be reduced
2997 while the lifetimes of the registers defined in this insn
2998 are likely to be increased. */
2999
3000 /* ???? Perhaps all the insns moved should be stored on a list
3001 which df_analyse removes when it recalculates data flow. */
3002
3003 return emit_block_insn_before (insn, before_insn, before_bb);
3004}
3005\f
3006/* Functions to query dataflow information. */
3007
3008
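/* Return non-zero if INSN contains a def of REGNO.  */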
3009int
3010df_insn_regno_def_p (df, bb, insn, regno)
3011 struct df *df;
3012 basic_block bb ATTRIBUTE_UNUSED;
3013 rtx insn;
3014 unsigned int regno;
3015{
3016 unsigned int uid;
3017 struct df_link *link;
3018
3019 uid = INSN_UID (insn);
3020
3021 for (link = df->insns[uid].defs; link; link = link->next)
3022 {
3023 struct ref *def = link->ref;
3024
3025 if (DF_REF_REGNO (def) == regno)
3026 return 1;
3027 }
3028
3029 return 0;
3030}
3031
3032
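/* Return non-zero if DEF is the only def that reaches each of its
   uses.  */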
3033static int
3034df_def_dominates_all_uses_p (df, def)
3035 struct df *df ATTRIBUTE_UNUSED;
3036 struct ref *def;
3037{
3038 struct df_link *du_link;
3039
3040 /* Follow def-use chain to find all the uses of this def. */
3041 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
3042 {
3043 struct ref *use = du_link->ref;
3044 struct df_link *ud_link;
3045
3046 /* Follow use-def chain to check all the defs for this use. */
3047 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
3048 if (ud_link->ref != def)
3049 return 0;
3050 }
3051 return 1;
3052}
3053
3054
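/* Return non-zero if every def within INSN dominates all its
   uses.  */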
3055int
3056df_insn_dominates_all_uses_p (df, bb, insn)
3057 struct df *df;
3058 basic_block bb ATTRIBUTE_UNUSED;
3059 rtx insn;
3060{
3061 unsigned int uid;
3062 struct df_link *link;
3063
3064 uid = INSN_UID (insn);
3065
3066 for (link = df->insns[uid].defs; link; link = link->next)
3067 {
3068 struct ref *def = link->ref;
3069
3070 if (! df_def_dominates_all_uses_p (df, def))
3071 return 0;
3072 }
3073
3074 return 1;
3075}
3076
3077
3078/* Return non-zero if DEF dominates all the uses within the bitmap
3079 BLOCKS. */
3080static int
3081df_def_dominates_uses_p (df, def, blocks)
3082 struct df *df ATTRIBUTE_UNUSED;
3083 struct ref *def;
3084 bitmap blocks;
3085{
3086 struct df_link *du_link;
3087
3088 /* Follow def-use chain to find all the uses of this def. */
3089 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
3090 {
3091 struct ref *use = du_link->ref;
3092 struct df_link *ud_link;
3093
3094 /* Only worry about the uses within BLOCKS. For example,
3095 consider a register defined within a loop that is live at the
3096 loop exits. */
3097 if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))
3098 {
3099 /* Follow use-def chain to check all the defs for this use. */
3100 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
3101 if (ud_link->ref != def)
3102 return 0;
3103 }
3104 }
3105 return 1;
3106}
3107
3108
3109/* Return non-zero if all the defs of INSN within BB dominate
3110 all the corresponding uses. */
3111int
3112df_insn_dominates_uses_p (df, bb, insn, blocks)
3113 struct df *df;
3114 basic_block bb ATTRIBUTE_UNUSED;
3115 rtx insn;
3116 bitmap blocks;
3117{
3118 unsigned int uid;
3119 struct df_link *link;
3120
3121 uid = INSN_UID (insn);
3122
3123 for (link = df->insns[uid].defs; link; link = link->next)
3124 {
3125 struct ref *def = link->ref;
3126
3127 /* Only consider the defs within BLOCKS. */
3128 if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
3129 && ! df_def_dominates_uses_p (df, def, blocks))
3130 return 0;
3131 }
3132 return 1;
3133}
3134
3135
3136/* Return the basic block that REG is referenced in, or NULL if it is
3137   referenced in multiple basic blocks.  */
3138basic_block
3139df_regno_bb (df, regno)
3140 struct df *df;
3141 unsigned int regno;
3142{
3143 struct df_link *defs = df->regs[regno].defs;
3144 struct df_link *uses = df->regs[regno].uses;
3145 struct ref *def = defs ? defs->ref : 0;
3146 struct ref *use = uses ? uses->ref : 0;
3147 basic_block bb_def = def ? DF_REF_BB (def) : 0;
3148 basic_block bb_use = use ? DF_REF_BB (use) : 0;
3149
3150 /* Compare blocks of first def and last use. ???? FIXME. What if
3151   the reg-def and reg-use lists are not correctly ordered?  */
3152 return bb_def == bb_use ? bb_def : 0;
3153}
3154
3155
3156/* Return non-zero if REG used in multiple basic blocks. */
3157int
3158df_reg_global_p (df, reg)
3159 struct df *df;
3160 rtx reg;
3161{
3162 return df_regno_bb (df, REGNO (reg)) != 0;
3163}
3164
3165
3166/* Return total lifetime (in insns) of REG. */
3167int
3168df_reg_lifetime (df, reg)
3169 struct df *df;
3170 rtx reg;
3171{
3172 return df->regs[REGNO (reg)].lifetime;
3173}
3174
3175
3176/* Return non-zero if REG live at start of BB. */
3177int
3178df_bb_reg_live_start_p (df, bb, reg)
3179 struct df *df ATTRIBUTE_UNUSED;
3180 basic_block bb;
3181 rtx reg;
3182{
3183 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3184
3185#ifdef ENABLE_CHECKING
3186 if (! bb_info->lr_in)
3187 abort ();
3188#endif
3189
3190 return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
3191}
3192
3193
3194/* Return non-zero if REG live at end of BB. */
3195int
3196df_bb_reg_live_end_p (df, bb, reg)
3197 struct df *df ATTRIBUTE_UNUSED;
3198 basic_block bb;
3199 rtx reg;
3200{
3201 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3202
3203#ifdef ENABLE_CHECKING
3204  if (! bb_info->lr_out)
3205 abort ();
3206#endif
3207
3208 return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
3209}
3210
3211
3212/* Return -1 if the life of REG1 is entirely before the life of REG2,
3213   1 if it is entirely after that of REG2, or 0 if the lives overlap.  */
3214int
3215df_bb_regs_lives_compare (df, bb, reg1, reg2)
3216 struct df *df;
3217 basic_block bb;
3218 rtx reg1;
3219 rtx reg2;
3220{
3221 unsigned int regno1 = REGNO (reg1);
3222 unsigned int regno2 = REGNO (reg2);
3223 struct ref *def1;
3224 struct ref *use1;
3225 struct ref *def2;
3226 struct ref *use2;
3227
3228
3229 /* The regs must be local to BB. */
3230 if (df_regno_bb (df, regno1) != bb
3231 || df_regno_bb (df, regno2) != bb)
3232 abort ();
3233
3234 def2 = df_bb_regno_first_def_find (df, bb, regno2);
3235 use1 = df_bb_regno_last_use_find (df, bb, regno1);
3236
3237 if (DF_INSN_LUID (df, DF_REF_INSN (def2))
3238 > DF_INSN_LUID (df, DF_REF_INSN (use1)))
3239 return -1;
3240
3241 def1 = df_bb_regno_first_def_find (df, bb, regno1);
3242 use2 = df_bb_regno_last_use_find (df, bb, regno2);
3243
3244 if (DF_INSN_LUID (df, DF_REF_INSN (def1))
3245 > DF_INSN_LUID (df, DF_REF_INSN (use2)))
3246 return 1;
3247
3248 return 0;
3249}
3250
3251
3252/* Return last use of REGNO within BB. */
3253static struct ref *
3254df_bb_regno_last_use_find (df, bb, regno)
3255 struct df * df;
3256 basic_block bb ATTRIBUTE_UNUSED;
3257 unsigned int regno;
3258{
3259 struct df_link *link;
3260
3261 /* This assumes that the reg-use list is ordered such that for any
3262 BB, the last use is found first. However, since the BBs are not
3263 ordered, the first use in the chain is not necessarily the last
3264 use in the function. */
3265 for (link = df->regs[regno].uses; link; link = link->next)
3266 {
3267 struct ref *use = link->ref;
3268
3269 if (DF_REF_BB (use) == bb)
3270 return use;
3271 }
3272 return 0;
3273}
3274
3275
3276/* Return first def of REGNO within BB. */
3277static struct ref *
3278df_bb_regno_first_def_find (df, bb, regno)
3279 struct df * df;
3280 basic_block bb ATTRIBUTE_UNUSED;
3281 unsigned int regno;
3282{
3283 struct df_link *link;
3284
3285 /* This assumes that the reg-def list is ordered such that for any
3286 BB, the first def is found first. However, since the BBs are not
3287 ordered, the first def in the chain is not necessarily the first
3288 def in the function. */
3289 for (link = df->regs[regno].defs; link; link = link->next)
3290 {
3291 struct ref *def = link->ref;
3292
3293 if (DF_REF_BB (def) == bb)
3294 return def;
3295 }
3296 return 0;
3297}
3298
3299
3300/* Return last use of REGNO inside INSN within BB.  */
3301static struct ref *
3302df_bb_insn_regno_last_use_find (df, bb, insn, regno)
3303 struct df * df;
3304 basic_block bb ATTRIBUTE_UNUSED;
3305 rtx insn;
3306 unsigned int regno;
3307{
3308 unsigned int uid;
3309 struct df_link *link;
3310
3311 uid = INSN_UID (insn);
3312
3313 for (link = df->insns[uid].uses; link; link = link->next)
3314 {
3315 struct ref *use = link->ref;
3316
3317 if (DF_REF_REGNO (use) == regno)
3318 return use;
3319 }
3320
3321 return 0;
3322}
3323
3324
3325/* Return first def of REGNO inside INSN within BB. */
3326static struct ref *
3327df_bb_insn_regno_first_def_find (df, bb, insn, regno)
3328 struct df * df;
3329 basic_block bb ATTRIBUTE_UNUSED;
3330 rtx insn;
3331 unsigned int regno;
3332{
3333 unsigned int uid;
3334 struct df_link *link;
3335
3336 uid = INSN_UID (insn);
3337
3338 for (link = df->insns[uid].defs; link; link = link->next)
3339 {
3340 struct ref *def = link->ref;
3341
3342 if (DF_REF_REGNO (def) == regno)
3343 return def;
3344 }
3345
3346 return 0;
3347}
3348
3349
3350/* Return insn using REG if the BB contains only a single
3351 use and def of REG. */
3352rtx
3353df_bb_single_def_use_insn_find (df, bb, insn, reg)
3354 struct df * df;
3355 basic_block bb;
3356 rtx insn;
3357 rtx reg;
3358{
3359 struct ref *def;
3360 struct ref *use;
3361 struct df_link *du_link;
3362
3363 def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));
3364
3365 if (! def)
3366 abort ();
3367
3368 du_link = DF_REF_CHAIN (def);
3369
3370 if (! du_link)
3371 return NULL_RTX;
3372
3373 use = du_link->ref;
3374
3375 /* Check if def is dead. */
3376 if (! use)
3377 return NULL_RTX;
3378
3379 /* Check for multiple uses. */
3380 if (du_link->next)
3381 return NULL_RTX;
3382
3383 return DF_REF_INSN (use);
3384}
3385\f
3386/* Functions for debugging/dumping dataflow information. */
3387
3388
3389/* Dump a def-use or use-def chain for REF to FILE. */
3390static void
3391df_chain_dump (link, file)
3392 struct df_link *link;
3393 FILE *file;
3394{
3395 fprintf (file, "{ ");
3396 for (; link; link = link->next)
3397 {
3398 fprintf (file, "%c%d ",
3399 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3400 DF_REF_ID (link->ref));
3401 }
3402 fprintf (file, "}");
3403}
3404
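/* As df_chain_dump, but also print the regno of each ref.  */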
3405static void
3406df_chain_dump_regno (link, file)
3407 struct df_link *link;
3408 FILE *file;
3409{
3410 fprintf (file, "{ ");
3411 for (; link; link = link->next)
3412 {
3413 fprintf (file, "%c%d(%d) ",
3414 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3415 DF_REF_ID (link->ref),
3416 DF_REF_REGNO (link->ref));
3417 }
3418 fprintf (file, "}");
3419}
3420
3421/* Dump dataflow info. */
3422void
3423df_dump (df, flags, file)
3424 struct df *df;
3425 int flags;
3426 FILE *file;
3427{
3428 unsigned int i;
3429 unsigned int j;
3430
3431 if (! df || ! file)
3432 return;
3433
3434 fprintf (file, "\nDataflow summary:\n");
3435 fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
3436 df->n_regs, df->n_defs, df->n_uses, df->n_bbs);
3437
3438 if (flags & DF_RD)
3439 {
3440 fprintf (file, "Reaching defs:\n");
3441 for (i = 0; i < df->n_bbs; i++)
3442 {
3443 basic_block bb = BASIC_BLOCK (i);
3444 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3445
3446 if (! bb_info->rd_in)
3447 continue;
3448
3449 fprintf (file, "bb %d in \t", i);
3450 dump_bitmap (file, bb_info->rd_in);
3451 fprintf (file, "bb %d gen \t", i);
3452 dump_bitmap (file, bb_info->rd_gen);
3453 fprintf (file, "bb %d kill\t", i);
3454 dump_bitmap (file, bb_info->rd_kill);
3455 fprintf (file, "bb %d out \t", i);
3456 dump_bitmap (file, bb_info->rd_out);
3457 }
3458 }
3459
3460 if (flags & DF_UD_CHAIN)
3461 {
3462 fprintf (file, "Use-def chains:\n");
3463 for (j = 0; j < df->n_defs; j++)
3464 {
3465 if (df->defs[j])
3466 {
3467 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3468 j, DF_REF_BBNO (df->defs[j]),
3469 DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
3470 DF_REF_INSN_UID (df->defs[j]),
3471 DF_REF_REGNO (df->defs[j]));
3472 df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
3473 fprintf (file, "\n");
3474 }
3475 }
3476 }
3477
3478 if (flags & DF_RU)
3479 {
3480 fprintf (file, "Reaching uses:\n");
3481 for (i = 0; i < df->n_bbs; i++)
3482 {
3483 basic_block bb = BASIC_BLOCK (i);
3484 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3485
3486 if (! bb_info->ru_in)
3487 continue;
3488
3489 fprintf (file, "bb %d in \t", i);
3490 dump_bitmap (file, bb_info->ru_in);
3491 fprintf (file, "bb %d gen \t", i);
3492 dump_bitmap (file, bb_info->ru_gen);
3493 fprintf (file, "bb %d kill\t", i);
3494 dump_bitmap (file, bb_info->ru_kill);
3495 fprintf (file, "bb %d out \t", i);
3496 dump_bitmap (file, bb_info->ru_out);
3497 }
3498 }
3499
3500 if (flags & DF_DU_CHAIN)
3501 {
3502 fprintf (file, "Def-use chains:\n");
3503 for (j = 0; j < df->n_uses; j++)
3504 {
3505 if (df->uses[j])
3506 {
3507 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3508 j, DF_REF_BBNO (df->uses[j]),
3509 DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
3510 DF_REF_INSN_UID (df->uses[j]),
3511 DF_REF_REGNO (df->uses[j]));
3512 df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
3513 fprintf (file, "\n");
3514 }
3515 }
3516 }
3517
3518 if (flags & DF_LR)
3519 {
3520 fprintf (file, "Live regs:\n");
3521 for (i = 0; i < df->n_bbs; i++)
3522 {
3523 basic_block bb = BASIC_BLOCK (i);
3524 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3525
3526 if (! bb_info->lr_in)
3527 continue;
3528
3529 fprintf (file, "bb %d in \t", i);
3530 dump_bitmap (file, bb_info->lr_in);
3531 fprintf (file, "bb %d use \t", i);
3532 dump_bitmap (file, bb_info->lr_use);
3533 fprintf (file, "bb %d def \t", i);
3534 dump_bitmap (file, bb_info->lr_def);
3535 fprintf (file, "bb %d out \t", i);
3536 dump_bitmap (file, bb_info->lr_out);
3537 }
3538 }
3539
3540 if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))
3541 {
3542 struct reg_info *reg_info = df->regs;
3543
3544 fprintf (file, "Register info:\n");
3545 for (j = 0; j < df->n_regs; j++)
3546 {
3547 if (((flags & DF_REG_INFO)
3548 && (reg_info[j].n_uses || reg_info[j].n_defs))
3549 || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
3550 || ((flags & DF_RU_CHAIN) && reg_info[j].uses))
3551 {
3552 fprintf (file, "reg %d", j);
3553 if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))
3554 {
3555 basic_block bb = df_regno_bb (df, j);
3556
3557 if (bb)
3558 fprintf (file, " bb %d", bb->index);
3559 else
3560 fprintf (file, " bb ?");
3561 }
3562 if (flags & DF_REG_INFO)
3563 {
3564 fprintf (file, " life %d", reg_info[j].lifetime);
3565 }
3566
3567 if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))
3568 {
3569 fprintf (file, " defs ");
3570 if (flags & DF_REG_INFO)
3571 fprintf (file, "%d ", reg_info[j].n_defs);
3572 if (flags & DF_RD_CHAIN)
3573 df_chain_dump (reg_info[j].defs, file);
3574 }
3575
3576 if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))
3577 {
3578 fprintf (file, " uses ");
3579 if (flags & DF_REG_INFO)
3580 fprintf (file, "%d ", reg_info[j].n_uses);
3581 if (flags & DF_RU_CHAIN)
3582 df_chain_dump (reg_info[j].uses, file);
3583 }
3584
3585 fprintf (file, "\n");
3586 }
3587 }
3588 }
3589 fprintf (file, "\n");
3590}
3591
3592
3593void
3594df_insn_debug (df, insn, file)
3595 struct df *df;
3596 rtx insn;
3597 FILE *file;
3598{
3599 unsigned int uid;
3600 int bbi;
3601
3602 uid = INSN_UID (insn);
3603 if (uid >= df->insn_size)
3604 return;
3605
3606 if (df->insns[uid].defs)
3607 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3608 else if (df->insns[uid].uses)
3609 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3610 else
3611 bbi = -1;
3612
3613 fprintf (file, "insn %d bb %d luid %d defs ",
3614 uid, bbi, DF_INSN_LUID (df, insn));
3615 df_chain_dump (df->insns[uid].defs, file);
3616 fprintf (file, " uses ");
3617 df_chain_dump (df->insns[uid].uses, file);
3618 fprintf (file, "\n");
3619}
3620
3621void
3622df_insn_debug_regno (df, insn, file)
3623 struct df *df;
3624 rtx insn;
3625 FILE *file;
3626{
3627 unsigned int uid;
3628 int bbi;
3629
3630 uid = INSN_UID (insn);
3631 if (uid >= df->insn_size)
3632 return;
3633
3634 if (df->insns[uid].defs)
3635 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3636 else if (df->insns[uid].uses)
3637 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3638 else
3639 bbi = -1;
3640
3641 fprintf (file, "insn %d bb %d luid %d defs ",
3642 uid, bbi, DF_INSN_LUID (df, insn));
3643 df_chain_dump_regno (df->insns[uid].defs, file);
3644 fprintf (file, " uses ");
3645 df_chain_dump_regno (df->insns[uid].uses, file);
3646 fprintf (file, "\n");
3647}
3648
3649static void
3650df_regno_debug (df, regno, file)
3651 struct df *df;
3652 unsigned int regno;
3653 FILE *file;
3654{
3655 if (regno >= df->reg_size)
3656 return;
3657
3658 fprintf (file, "reg %d life %d defs ",
3659 regno, df->regs[regno].lifetime);
3660 df_chain_dump (df->regs[regno].defs, file);
3661 fprintf (file, " uses ");
3662 df_chain_dump (df->regs[regno].uses, file);
3663 fprintf (file, "\n");
3664}
3665
3666
3667static void
3668df_ref_debug (df, ref, file)
3669 struct df *df;
3670 struct ref *ref;
3671 FILE *file;
3672{
3673 fprintf (file, "%c%d ",
3674 DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
3675 DF_REF_ID (ref));
3676 fprintf (file, "reg %d bb %d luid %d insn %d chain ",
3677 DF_REF_REGNO (ref),
3678 DF_REF_BBNO (ref),
3679 DF_INSN_LUID (df, DF_REF_INSN (ref)),
3680 INSN_UID (DF_REF_INSN (ref)));
3681 df_chain_dump (DF_REF_CHAIN (ref), file);
3682 fprintf (file, "\n");
3683}
3684
3685
3686void
3687debug_df_insn (insn)
3688 rtx insn;
3689{
3690 df_insn_debug (ddf, insn, stderr);
3691 debug_rtx (insn);
3692}
3693
3694
3695void
3696debug_df_reg (reg)
3697 rtx reg;
3698{
3699 df_regno_debug (ddf, REGNO (reg), stderr);
3700}
3701
3702
3703void
3704debug_df_regno (regno)
3705 unsigned int regno;
3706{
3707 df_regno_debug (ddf, regno, stderr);
3708}
3709
3710
3711void
3712debug_df_ref (ref)
3713 struct ref *ref;
3714{
3715 df_ref_debug (ddf, ref, stderr);
3716}
3717
3718
3719void
3720debug_df_defno (defno)
3721 unsigned int defno;
3722{
3723 df_ref_debug (ddf, ddf->defs[defno], stderr);
3724}
3725
3726
3727void
3728	debug_df_useno (useno)
3729	     unsigned int useno;
3730	{
3731	  df_ref_debug (ddf, ddf->uses[useno], stderr);
3732}
3733
3734
3735void
3736debug_df_chain (link)
3737 struct df_link *link;
3738{
3739 df_chain_dump (link, stderr);
3740 fputc ('\n', stderr);
3741}