1/* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9This file is part of GCC.
10
11GCC is free software; you can redistribute it and/or modify it under
12the terms of the GNU General Public License as published by the Free
13Software Foundation; either version 2, or (at your option) any later
14version.
15
16GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17WARRANTY; without even the implied warranty of MERCHANTABILITY or
18FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19for more details.
20
21You should have received a copy of the GNU General Public License
22along with GCC; see the file COPYING. If not, write to the Free
23Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2402110-1301, USA. */
25
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
30#include "rtl.h"
31#include "tm_p.h"
32#include "insn-config.h"
33#include "recog.h"
34#include "function.h"
35#include "regs.h"
36#include "output.h"
37#include "alloc-pool.h"
38#include "flags.h"
39#include "hard-reg-set.h"
40#include "basic-block.h"
41#include "sbitmap.h"
42#include "bitmap.h"
43#include "timevar.h"
44#include "df.h"
45
46#define DF_SPARSE_THRESHOLD 32
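/* Registers with more than DF_SPARSE_THRESHOLD defs or uses are handled
   sparsely: the regno itself is recorded in the per-block sparse_kill set
   and its whole range of ref ids is cleared, instead of one kill bit
   being set per reference.  */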
47
48static bitmap seen_in_block = NULL;
49static bitmap seen_in_insn = NULL;
50
51\f
52/*----------------------------------------------------------------------------
53 Public access functions for the dataflow problems.
54----------------------------------------------------------------------------*/
55
56/* Get the instance of the problem that DFLOW is dependent on. */
57
58struct dataflow *
59df_get_dependent_problem (struct dataflow *dflow)
60{
61 struct df *df = dflow->df;
62 struct df_problem *dependent_problem = dflow->problem->dependent_problem;
63
64 gcc_assert (dependent_problem);
65 return df->problems_by_index[dependent_problem->id];
66}
67
68
69/* Create a du or ud chain from SRC to DST and link it into SRC. */
70
71struct df_link *
72df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
73{
74 struct df_link *head = DF_REF_CHAIN (src);
75 struct df_link *link = pool_alloc (dflow->block_pool);
76
77 DF_REF_CHAIN (src) = link;
78 link->next = head;
79 link->ref = dst;
80 return link;
81}
82
83
84/* Delete a du or ud chain for REF. If LINK is NULL, delete all
85 chains for ref and check to see if the reverse chains can also be
86 deleted. If LINK is not NULL it must be a link off of ref. In
87 this case, the other end is not deleted. */
88
89void
90df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
91{
92 struct df_link *chain = DF_REF_CHAIN (ref);
93 if (link)
94 {
95 /* Link was the first element in the chain. */
96 if (chain == link)
97 DF_REF_CHAIN (ref) = link->next;
98 else
99 {
100 /* Link is an internal element in the chain. */
101 struct df_link *prev = chain;
102 while (chain)
103 {
104 if (chain == link)
105 {
106 prev->next = chain->next;
107 break;
108 }
109 prev = chain;
110 chain = chain->next;
111 }
112 }
113 pool_free (dflow->block_pool, link);
114 }
115 else
116 {
117 /* If chain is NULL here, it was because of a recursive call
118 when the other flavor of chains was not built. Just run thru
119 the entire chain calling the other side and then deleting the
120 link. */
121 while (chain)
122 {
123 struct df_link *next = chain->next;
124 /* Delete the other side if it exists. */
125 df_chain_unlink (dflow, chain->ref, chain);
126 chain = next;
127 }
128 }
129}
130
131
132/* Copy the du or ud chain starting at FROM_REF and attach it to
133 TO_REF. */
134
135void
136df_chain_copy (struct dataflow *dflow,
137 struct df_ref *to_ref,
138 struct df_link *from_ref)
139{
140 while (from_ref)
141 {
142 df_chain_create (dflow, to_ref, from_ref->ref);
143 from_ref = from_ref->next;
144 }
145}
146
147
148/* Get the live in set for BB no matter what problem happens to be
149 defined. */
150
151bitmap
152df_get_live_in (struct df *df, basic_block bb)
153{
154 gcc_assert (df->problems_by_index[DF_LR]);
155
156 if (df->problems_by_index[DF_UREC])
157 return DF_RA_LIVE_IN (df, bb);
158 else if (df->problems_by_index[DF_UR])
159 return DF_LIVE_IN (df, bb);
160 else
161 return DF_UPWARD_LIVE_IN (df, bb);
162}
163
164
165/* Get the live out set for BB no matter what problem happens to be
166 defined. */
167
168bitmap
169df_get_live_out (struct df *df, basic_block bb)
170{
171 gcc_assert (df->problems_by_index[DF_LR]);
172
173 if (df->problems_by_index[DF_UREC])
174 return DF_RA_LIVE_OUT (df, bb);
175 else if (df->problems_by_index[DF_UR])
176 return DF_LIVE_OUT (df, bb);
177 else
178 return DF_UPWARD_LIVE_OUT (df, bb);
179}
180
181
182/*----------------------------------------------------------------------------
183 Utility functions.
184----------------------------------------------------------------------------*/
185
186/* Generic versions to get the void* version of the block info. Only
187 used inside the problem instance vectors. */
188
189/* Grow the bb_info array. */
190
191void
192df_grow_bb_info (struct dataflow *dflow)
193{
194 unsigned int new_size = last_basic_block + 1;
195 if (dflow->block_info_size < new_size)
196 {
197 new_size += new_size / 4;
198 dflow->block_info = xrealloc (dflow->block_info,
199 new_size *sizeof (void*));
200 memset (dflow->block_info + dflow->block_info_size, 0,
201 (new_size - dflow->block_info_size) *sizeof (void *));
202 dflow->block_info_size = new_size;
203 }
204}
205
206/* Dump the du or ud chain LINK to FILE. */
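/* Each element is printed as d<ID> for a def or u<ID> for a use, followed
   by its basic block and insn, e.g. "{ u5(bb 3 insn 12) d9(bb 4 insn 17) }";
   the ids and numbers here are only illustrative.  */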
207
208void
209df_chain_dump (struct df *df ATTRIBUTE_UNUSED, struct df_link *link, FILE *file)
210{
211 fprintf (file, "{ ");
212 for (; link; link = link->next)
213 {
214 fprintf (file, "%c%d(bb %d insn %d) ",
215 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
216 DF_REF_ID (link->ref),
217 DF_REF_BBNO (link->ref),
218 DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
219 }
220 fprintf (file, "}");
221}
222
223
224/* Print some basic block info as part of df_dump. */
225
226void
227df_print_bb_index (basic_block bb, FILE *file)
228{
229 edge e;
230 edge_iterator ei;
231
232 fprintf (file, "( ");
233 FOR_EACH_EDGE (e, ei, bb->preds)
234 {
235 basic_block pred = e->src;
236 fprintf (file, "%d ", pred->index);
237 }
238 fprintf (file, ")->[%d]->( ", bb->index);
239 FOR_EACH_EDGE (e, ei, bb->succs)
240 {
241 basic_block succ = e->dest;
242 fprintf (file, "%d ", succ->index);
243 }
244 fprintf (file, ")\n");
245}
246
247
248/* Return the bitmap of the COUNT reference ids starting at START for REGNO, caching it in MAPS[REGNO]. */
249
250static inline bitmap
251df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
252{
253 bitmap ids = maps[regno];
254 if (!ids)
255 {
256 unsigned int i;
257 unsigned int end = start + count;
258 ids = BITMAP_ALLOC (NULL);
259 maps[regno] = ids;
260 for (i = start; i < end; i++)
261 bitmap_set_bit (ids, i);
262 }
263 return ids;
264}
265
266
267/* Make sure that the seen_in_insn and seen_in_block bitmaps are set
268 up correctly. */
269
270static void
271df_set_seen (void)
272{
273 seen_in_block = BITMAP_ALLOC (NULL);
274 seen_in_insn = BITMAP_ALLOC (NULL);
275}
276
277
278static void
279df_unset_seen (void)
280{
281 BITMAP_FREE (seen_in_block);
282 BITMAP_FREE (seen_in_insn);
283}
284
285
286\f
287/*----------------------------------------------------------------------------
288 REACHING USES
289
290 Find the locations in the function where each use site for a pseudo
291 can reach backwards.
292
293----------------------------------------------------------------------------*/
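/* The bit vectors for this problem are indexed by use id:

     gen  - uses in the block that are not preceded by a def of the same
            regno earlier in the block,
     kill - uses of regnos that are defined in the block (regnos with many
            uses are recorded in sparse_kill instead),
     in   - uses reachable from the start of the block,
     out  - uses reachable from the end of the block,

   and the problem is solved in the backward direction.  */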
294
295struct df_ru_problem_data
296{
297 bitmap *use_sites; /* Bitmap of uses for each pseudo. */
298 unsigned int use_sites_size; /* Size of use_sites. */
299 /* The set of regs invalidated by call, represented as regnos. */
300 bitmap sparse_invalidated_by_call;
301 /* The set of uses of regs invalidated by call, represented as use ids. */
302 bitmap dense_invalidated_by_call;
303};
304
305/* Get basic block info. */
306
307struct df_ru_bb_info *
308df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
309{
310 return (struct df_ru_bb_info *) dflow->block_info[index];
311}
312
313
314/* Set basic block info. */
315
316static void
317df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
318 struct df_ru_bb_info *bb_info)
319{
320 dflow->block_info[index] = bb_info;
321}
322
323
324/* Free basic block info. */
325
326static void
327df_ru_free_bb_info (struct dataflow *dflow, void *vbb_info)
328{
329 struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
330 if (bb_info)
331 {
332 BITMAP_FREE (bb_info->kill);
333 BITMAP_FREE (bb_info->sparse_kill);
334 BITMAP_FREE (bb_info->gen);
335 BITMAP_FREE (bb_info->in);
336 BITMAP_FREE (bb_info->out);
337 pool_free (dflow->block_pool, bb_info);
338 }
339}
340
341
342/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
343 not touched unless the block is new. */
344
345static void
346df_ru_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
347{
348 unsigned int bb_index;
349 bitmap_iterator bi;
350 unsigned int reg_size = max_reg_num ();
351
352 if (! dflow->block_pool)
353 dflow->block_pool = create_alloc_pool ("df_ru_block pool",
354 sizeof (struct df_ru_bb_info), 50);
355
356 if (dflow->problem_data)
357 {
358 unsigned int i;
359 struct df_ru_problem_data *problem_data =
360 (struct df_ru_problem_data *) dflow->problem_data;
361
362 for (i = 0; i < problem_data->use_sites_size; i++)
363 {
364 bitmap bm = problem_data->use_sites[i];
365 if (bm)
366 {
367 BITMAP_FREE (bm);
368 problem_data->use_sites[i] = NULL;
369 }
370 }
371
372 if (problem_data->use_sites_size < reg_size)
373 {
374 problem_data->use_sites
375 = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
376 memset (problem_data->use_sites + problem_data->use_sites_size, 0,
377 (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
378 problem_data->use_sites_size = reg_size;
379 }
380
381 bitmap_clear (problem_data->sparse_invalidated_by_call);
382 bitmap_clear (problem_data->dense_invalidated_by_call);
383 }
384 else
385 {
386 struct df_ru_problem_data *problem_data =
387 xmalloc (sizeof (struct df_ru_problem_data));
388 dflow->problem_data = problem_data;
389
390 problem_data->use_sites = xcalloc (reg_size, sizeof (bitmap));
391 problem_data->use_sites_size = reg_size;
392 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
393 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
394 }
395
396 df_grow_bb_info (dflow);
397
398 /* Because of the clustering of all use sites for the same pseudo,
399 we have to process all of the blocks before doing the
400 analysis. */
401
402 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
403 {
404 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
405 if (bb_info)
406 {
407 bitmap_clear (bb_info->kill);
408 bitmap_clear (bb_info->sparse_kill);
409 bitmap_clear (bb_info->gen);
410 }
411 else
412 {
413 bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
414 df_ru_set_bb_info (dflow, bb_index, bb_info);
415 bb_info->kill = BITMAP_ALLOC (NULL);
416 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
417 bb_info->gen = BITMAP_ALLOC (NULL);
418 bb_info->in = BITMAP_ALLOC (NULL);
419 bb_info->out = BITMAP_ALLOC (NULL);
420 }
421 }
422}
423
424
425/* Process a list of DEFs for df_ru_bb_local_compute. */
426
427static void
428df_ru_bb_local_compute_process_def (struct dataflow *dflow,
429 struct df_ru_bb_info *bb_info,
430 struct df_ref *def)
431{
432 struct df *df = dflow->df;
433 while (def)
434 {
435 unsigned int regno = DF_REF_REGNO (def);
436 unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
437 unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
438 if (!bitmap_bit_p (seen_in_block, regno))
439 {
440 /* The first def for regno causes the kill info to be
441 generated and the gen information to be cleared. */
442 if (!bitmap_bit_p (seen_in_insn, regno))
443 {
444 if (n_uses > DF_SPARSE_THRESHOLD)
445 {
446 bitmap_set_bit (bb_info->sparse_kill, regno);
447 bitmap_clear_range (bb_info->gen, begin, n_uses);
448 }
449 else
450 {
451 struct df_ru_problem_data *problem_data =
452 (struct df_ru_problem_data *) dflow->problem_data;
453 bitmap uses =
454 df_ref_bitmap (problem_data->use_sites, regno,
455 begin, n_uses);
456 bitmap_ior_into (bb_info->kill, uses);
457 bitmap_and_compl_into (bb_info->gen, uses);
458 }
459 }
460 bitmap_set_bit (seen_in_insn, regno);
461 }
462 def = def->next_ref;
463 }
464}
465
466
467/* Process a list of USEs for df_ru_bb_local_compute. */
468
469static void
470df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
471 struct df_ref *use,
472 enum df_ref_flags top_flag)
473{
474 while (use)
475 {
476 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
477 {
478 /* Add use to set of gens in this BB unless we have seen a
479 def in a previous instruction. */
480 unsigned int regno = DF_REF_REGNO (use);
481 if (!bitmap_bit_p (seen_in_block, regno))
482 bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
483 }
484 use = use->next_ref;
485 }
486}
487
488/* Compute local reaching use (upward exposed use) info for basic
489 block BB. USE_INFO->REGS[R] caches the set of uses for register R. */
490static void
491df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
492{
493 struct df *df = dflow->df;
494 basic_block bb = BASIC_BLOCK (bb_index);
495 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
496 rtx insn;
497
498 /* Set when a def for regno is seen. */
499 bitmap_clear (seen_in_block);
500 bitmap_clear (seen_in_insn);
501
502#ifdef EH_USES
503 /* Variables defined in the prolog that are used by the exception
504 handler. */
505 df_ru_bb_local_compute_process_use (bb_info,
506 df_get_artificial_uses (df, bb_index),
507 DF_REF_AT_TOP);
508#endif
509
510 /* Process the artificial defs first since these are at the top of
511 the block. */
512 df_ru_bb_local_compute_process_def (dflow, bb_info,
513 df_get_artificial_defs (df, bb_index));
514
515 FOR_BB_INSNS (bb, insn)
516 {
517 unsigned int uid = INSN_UID (insn);
518 if (! INSN_P (insn))
519 continue;
520
521 df_ru_bb_local_compute_process_def (dflow, bb_info,
522 DF_INSN_UID_GET (df, uid)->defs);
523
524 /* The use processing must happen after the defs processing even
525 though the uses logically happen first since the defs clear
526 the gen set. Otherwise, a use for regno occurring in the same
527 instruction as a def for regno would be cleared. */
528 df_ru_bb_local_compute_process_use (bb_info,
529 DF_INSN_UID_GET (df, uid)->uses, 0);
530
531 bitmap_ior_into (seen_in_block, seen_in_insn);
532 bitmap_clear (seen_in_insn);
533 }
534
535 /* Process the hardware registers that are always live. */
536 df_ru_bb_local_compute_process_use (bb_info,
537 df_get_artificial_uses (df, bb_index), 0);
538}
539
540
541/* Compute local reaching use (upward exposed use) info for each basic
542 block within BLOCKS. */
543static void
544df_ru_local_compute (struct dataflow *dflow,
545 bitmap all_blocks,
546 bitmap rescan_blocks ATTRIBUTE_UNUSED)
547{
548 struct df *df = dflow->df;
549 unsigned int bb_index;
550 bitmap_iterator bi;
551 unsigned int regno;
552 struct df_ru_problem_data *problem_data =
553 (struct df_ru_problem_data *) dflow->problem_data;
554 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
555 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
556
557 df_set_seen ();
558
559 if (!df->use_info.refs_organized)
560 df_reorganize_refs (&df->use_info);
561
562 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
563 {
564 df_ru_bb_local_compute (dflow, bb_index);
565 }
566
567 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
568 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
569 {
570 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
571 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
572 bitmap_set_bit (sparse_invalidated, regno);
573 else
574 {
575 bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
576 reg_info->begin, reg_info->n_refs);
577 bitmap_ior_into (dense_invalidated, defs);
578 }
579 }
580
581 df_unset_seen ();
582}
583
584
585/* Initialize the solution bit vectors for problem. */
586
587static void
588df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
589{
590 unsigned int bb_index;
591 bitmap_iterator bi;
592
593 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
594 {
595 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
596 bitmap_copy (bb_info->in, bb_info->gen);
597 bitmap_clear (bb_info->out);
598 }
599}
600
601
602/* Out of the edge's source gets the union with in of the edge's destination. */
603
604static void
605df_ru_confluence_n (struct dataflow *dflow, edge e)
606{
607 bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
608 bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;
609
610 if (e->flags & EDGE_EH)
611 {
612 struct df_ru_problem_data *problem_data =
613 (struct df_ru_problem_data *) dflow->problem_data;
614 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
615 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
616 struct df *df = dflow->df;
617 bitmap_iterator bi;
618 unsigned int regno;
619 bitmap tmp = BITMAP_ALLOC (NULL);
620
621 bitmap_copy (tmp, op2);
622 bitmap_and_compl_into (tmp, dense_invalidated);
623
624 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
625 {
626 bitmap_clear_range (tmp,
627 DF_REG_USE_GET (df, regno)->begin,
628 DF_REG_USE_GET (df, regno)->n_refs);
629 }
630 bitmap_ior_into (op1, tmp);
631 BITMAP_FREE (tmp);
632 }
633 else
634 bitmap_ior_into (op1, op2);
635}
636
637
638/* Transfer function. */
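/* in = gen | (out - kill), where each regno in sparse_kill additionally
   removes its whole range of use ids from out.  */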
639
640static bool
641df_ru_transfer_function (struct dataflow *dflow, int bb_index)
642{
643 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
644 unsigned int regno;
645 bitmap_iterator bi;
646 bitmap in = bb_info->in;
647 bitmap out = bb_info->out;
648 bitmap gen = bb_info->gen;
649 bitmap kill = bb_info->kill;
650 bitmap sparse_kill = bb_info->sparse_kill;
651
652 if (bitmap_empty_p (sparse_kill))
653 return bitmap_ior_and_compl (in, gen, out, kill);
654 else
655 {
656 struct df *df = dflow->df;
657 bool changed = false;
658 bitmap tmp = BITMAP_ALLOC (NULL);
659 bitmap_copy (tmp, out);
660 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
661 {
662 bitmap_clear_range (tmp,
663 DF_REG_USE_GET (df, regno)->begin,
664 DF_REG_USE_GET (df, regno)->n_refs);
665 }
666 bitmap_and_compl_into (tmp, kill);
667 bitmap_ior_into (tmp, gen);
668 changed = !bitmap_equal_p (tmp, in);
669 if (changed)
670 {
671 BITMAP_FREE (in);
672 bb_info->in = tmp;
673 }
674 else
675 BITMAP_FREE (tmp);
676 return changed;
677 }
678}
679
680
681/* Free all storage associated with the problem. */
682
683static void
684df_ru_free (struct dataflow *dflow)
685{
686 unsigned int i;
687 struct df_ru_problem_data *problem_data =
688 (struct df_ru_problem_data *) dflow->problem_data;
689
690 for (i = 0; i < dflow->block_info_size; i++)
691 {
692 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
693 if (bb_info)
694 {
695 BITMAP_FREE (bb_info->kill);
696 BITMAP_FREE (bb_info->sparse_kill);
697 BITMAP_FREE (bb_info->gen);
698 BITMAP_FREE (bb_info->in);
699 BITMAP_FREE (bb_info->out);
700 }
701 }
702
703 free_alloc_pool (dflow->block_pool);
704
705 for (i = 0; i < problem_data->use_sites_size; i++)
706 {
707 bitmap bm = problem_data->use_sites[i];
708 if (bm)
709 BITMAP_FREE (bm);
710 }
711
712 free (problem_data->use_sites);
713 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
714 BITMAP_FREE (problem_data->dense_invalidated_by_call);
715
716 dflow->block_info_size = 0;
717 free (dflow->block_info);
718 free (dflow->problem_data);
719 free (dflow);
720}
721
722
723/* Debugging info. */
724
725static void
726df_ru_dump (struct dataflow *dflow, FILE *file)
727{
728 basic_block bb;
729 struct df *df = dflow->df;
730 struct df_ru_problem_data *problem_data =
731 (struct df_ru_problem_data *) dflow->problem_data;
732 unsigned int m = max_reg_num ();
733 unsigned int regno;
734
735 fprintf (file, "Reaching uses:\n");
736
737 fprintf (file, " sparse invalidated \t");
738 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
739 fprintf (file, " dense invalidated \t");
740 dump_bitmap (file, problem_data->dense_invalidated_by_call);
741
742 for (regno = 0; regno < m; regno++)
743 if (DF_REG_USE_GET (df, regno)->n_refs)
744 fprintf (file, "%d[%d,%d] ", regno,
745 DF_REG_USE_GET (df, regno)->begin,
746 DF_REG_USE_GET (df, regno)->n_refs);
747 fprintf (file, "\n");
748
749 FOR_ALL_BB (bb)
750 {
751 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
752 df_print_bb_index (bb, file);
753
754 if (! bb_info->in)
755 continue;
756
757 fprintf (file, " in \t");
758 dump_bitmap (file, bb_info->in);
759 fprintf (file, " gen \t");
760 dump_bitmap (file, bb_info->gen);
761 fprintf (file, " kill\t");
762 dump_bitmap (file, bb_info->kill);
763 fprintf (file, " out \t");
764 dump_bitmap (file, bb_info->out);
765 }
766}
767
768/* All of the information associated with every instance of the problem. */
769
770static struct df_problem problem_RU =
771{
772 DF_RU, /* Problem id. */
773 DF_BACKWARD, /* Direction. */
774 df_ru_alloc, /* Allocate the problem specific data. */
775 df_ru_free_bb_info, /* Free basic block info. */
776 df_ru_local_compute, /* Local compute function. */
777 df_ru_init_solution, /* Init the solution specific data. */
778 df_iterative_dataflow, /* Iterative solver. */
779 NULL, /* Confluence operator 0. */
780 df_ru_confluence_n, /* Confluence operator n. */
781 df_ru_transfer_function, /* Transfer function. */
782 NULL, /* Finalize function. */
783 df_ru_free, /* Free all of the problem information. */
784 df_ru_dump, /* Debugging. */
785 NULL /* Dependent problem. */
786};
787
788
789
790/* Create a new DATAFLOW instance and add it to an existing instance
791 of DF. The returned structure is what is used to get at the
792 solution. */
793
794struct dataflow *
795df_ru_add_problem (struct df *df)
796{
797 return df_add_problem (df, &problem_RU);
798}
799
800\f
801/*----------------------------------------------------------------------------
802 REACHING DEFINITIONS
803
804 Find the locations in the function where each definition site for a
805 pseudo reaches.
806----------------------------------------------------------------------------*/
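/* The bit vectors for this problem are indexed by def id:

     gen  - defs in the block that reach the end of the block,
     kill - defs of regnos that are defined in the block (regnos with many
            defs are recorded in sparse_kill instead),
     in   - defs reaching the start of the block,
     out  - defs reaching the end of the block,

   and the problem is solved in the forward direction.  */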
807
808struct df_rd_problem_data
809{
810 bitmap *def_sites; /* Bitmap of defs for each pseudo. */
811 unsigned int def_sites_size; /* Size of def_sites. */
812 /* The set of regs invalidated by call, represented as regnos. */
813 bitmap sparse_invalidated_by_call;
814 /* The set of defs of regs invalidated by call, represented as def ids. */
815 bitmap dense_invalidated_by_call;
816};
817
818/* Get basic block info. */
819
820struct df_rd_bb_info *
821df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
822{
823 return (struct df_rd_bb_info *) dflow->block_info[index];
824}
825
826
827/* Set basic block info. */
828
829static void
830df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
831 struct df_rd_bb_info *bb_info)
832{
833 dflow->block_info[index] = bb_info;
834}
835
836
837/* Free basic block info. */
838
839static void
840df_rd_free_bb_info (struct dataflow *dflow, void *vbb_info)
841{
842 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
843 if (bb_info)
844 {
845 BITMAP_FREE (bb_info->kill);
846 BITMAP_FREE (bb_info->sparse_kill);
847 BITMAP_FREE (bb_info->gen);
848 BITMAP_FREE (bb_info->in);
849 BITMAP_FREE (bb_info->out);
850 pool_free (dflow->block_pool, bb_info);
851 }
852}
853
854
855/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
856 not touched unless the block is new. */
857
858static void
859df_rd_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
860{
861 unsigned int bb_index;
862 bitmap_iterator bi;
863 unsigned int reg_size = max_reg_num ();
864
865 if (! dflow->block_pool)
866 dflow->block_pool = create_alloc_pool ("df_rd_block pool",
867 sizeof (struct df_rd_bb_info), 50);
868
869 if (dflow->problem_data)
870 {
871 unsigned int i;
872 struct df_rd_problem_data *problem_data =
873 (struct df_rd_problem_data *) dflow->problem_data;
874
875 for (i = 0; i < problem_data->def_sites_size; i++)
876 {
877 bitmap bm = problem_data->def_sites[i];
878 if (bm)
879 {
880 BITMAP_FREE (bm);
881 problem_data->def_sites[i] = NULL;
882 }
883 }
884
885 if (problem_data->def_sites_size < reg_size)
886 {
887 problem_data->def_sites
888 = xrealloc (problem_data->def_sites, reg_size *sizeof (bitmap));
889 memset (problem_data->def_sites + problem_data->def_sites_size, 0,
890 (reg_size - problem_data->def_sites_size) *sizeof (bitmap));
891 problem_data->def_sites_size = reg_size;
892 }
893
894 bitmap_clear (problem_data->sparse_invalidated_by_call);
895 bitmap_clear (problem_data->dense_invalidated_by_call);
896 }
897 else
898 {
899 struct df_rd_problem_data *problem_data =
900 xmalloc (sizeof (struct df_rd_problem_data));
901 dflow->problem_data = problem_data;
902
903 problem_data->def_sites = xcalloc (reg_size, sizeof (bitmap));
904 problem_data->def_sites_size = reg_size;
905 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
906 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
907 }
908
909 df_grow_bb_info (dflow);
910
911 /* Because of the clustering of all def sites for the same pseudo,
912 we have to process all of the blocks before doing the
913 analysis. */
914
915 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
916 {
917 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
918 if (bb_info)
919 {
920 bitmap_clear (bb_info->kill);
921 bitmap_clear (bb_info->sparse_kill);
922 bitmap_clear (bb_info->gen);
923 }
924 else
925 {
926 bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
927 df_rd_set_bb_info (dflow, bb_index, bb_info);
928 bb_info->kill = BITMAP_ALLOC (NULL);
929 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
930 bb_info->gen = BITMAP_ALLOC (NULL);
931 bb_info->in = BITMAP_ALLOC (NULL);
932 bb_info->out = BITMAP_ALLOC (NULL);
933 }
934 }
935}
936
937
938/* Process a list of DEFs for df_rd_bb_local_compute. */
939
940static void
941df_rd_bb_local_compute_process_def (struct dataflow *dflow,
942 struct df_rd_bb_info *bb_info,
943 struct df_ref *def)
944{
945 struct df *df = dflow->df;
946 while (def)
947 {
948 unsigned int regno = DF_REF_REGNO (def);
949 unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
950 unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;
951
952 /* Only the last def(s) for a regno in the block have any
953 effect. */
954 if (!bitmap_bit_p (seen_in_block, regno))
955 {
956 /* The first def for regno in insn gets to knock out the
957 defs from other instructions. */
958 if (!bitmap_bit_p (seen_in_insn, regno))
959 {
960 if (n_defs > DF_SPARSE_THRESHOLD)
961 {
962 bitmap_set_bit (bb_info->sparse_kill, regno);
963 bitmap_clear_range (bb_info->gen, begin, n_defs);
964 }
965 else
966 {
967 struct df_rd_problem_data *problem_data =
968 (struct df_rd_problem_data *) dflow->problem_data;
969 bitmap defs =
970 df_ref_bitmap (problem_data->def_sites, regno,
971 begin, n_defs);
972 bitmap_ior_into (bb_info->kill, defs);
973 bitmap_and_compl_into (bb_info->gen, defs);
974 }
975 }
976
977 bitmap_set_bit (seen_in_insn, regno);
978 /* All defs for regno in the instruction may be put into
979 the gen set. */
980 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
981 bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
982 }
983 def = def->next_ref;
984 }
985}
986
987/* Compute local reaching def info for basic block BB. */
988
989static void
990df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
991{
992 struct df *df = dflow->df;
993 basic_block bb = BASIC_BLOCK (bb_index);
994 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
995 rtx insn;
996
997 bitmap_clear (seen_in_block);
998 bitmap_clear (seen_in_insn);
999
1000 FOR_BB_INSNS_REVERSE (bb, insn)
1001 {
1002 unsigned int uid = INSN_UID (insn);
1003
1004 if (! INSN_P (insn))
1005 continue;
1006
1007 df_rd_bb_local_compute_process_def (dflow, bb_info,
1008 DF_INSN_UID_GET (df, uid)->defs);
1009
1010 /* This complex dance with the two bitmaps is required because
1011 instructions can assign twice to the same pseudo. This
1012 generally happens with calls that will have one def for the
1013 result and another def for the clobber. If only one vector
1014 is used and the clobber goes first, the result will be
1015 lost. */
1016 bitmap_ior_into (seen_in_block, seen_in_insn);
1017 bitmap_clear (seen_in_insn);
1018 }
1019
1020 /* Process the artificial defs last since we are going backwards
1021 through the block and these are logically at the start. */
1022 df_rd_bb_local_compute_process_def (dflow, bb_info,
1023 df_get_artificial_defs (df, bb_index));
1024}
1025
1026
1027/* Compute local reaching def info for each basic block within BLOCKS. */
1028
1029static void
1030df_rd_local_compute (struct dataflow *dflow,
1031 bitmap all_blocks,
1032 bitmap rescan_blocks ATTRIBUTE_UNUSED)
1033{
1034 struct df *df = dflow->df;
1035 unsigned int bb_index;
1036 bitmap_iterator bi;
1037 unsigned int regno;
1038 struct df_rd_problem_data *problem_data =
1039 (struct df_rd_problem_data *) dflow->problem_data;
1040 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1041 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1042
1043 df_set_seen ();
1044
1045 if (!df->def_info.refs_organized)
1046 df_reorganize_refs (&df->def_info);
1047
1048 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1049 {
1050 df_rd_bb_local_compute (dflow, bb_index);
1051 }
1052
1053 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
1054 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
1055 {
1056 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
1057 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
1058 {
1059 bitmap_set_bit (sparse_invalidated, regno);
1060 }
1061 else
1062 {
1063 bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
1064 reg_info->begin, reg_info->n_refs);
1065 bitmap_ior_into (dense_invalidated, defs);
1066 }
1067 }
1068 df_unset_seen ();
1069}
1070
1071
1072/* Initialize the solution bit vectors for problem. */
1073
1074static void
1075df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
1076{
1077 unsigned int bb_index;
1078 bitmap_iterator bi;
1079
1080 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1081 {
1082 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1083
1084 bitmap_copy (bb_info->out, bb_info->gen);
1085 bitmap_clear (bb_info->in);
1086 }
1087}
1088
1089/* In of the edge's destination gets the union with out of the edge's source. */
1090
1091static void
1092df_rd_confluence_n (struct dataflow *dflow, edge e)
1093{
1094 bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
1095 bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;
1096
1097 if (e->flags & EDGE_EH)
1098 {
1099 struct df_rd_problem_data *problem_data =
1100 (struct df_rd_problem_data *) dflow->problem_data;
1101 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1102 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1103 struct df *df = dflow->df;
1104 bitmap_iterator bi;
1105 unsigned int regno;
1106 bitmap tmp = BITMAP_ALLOC (NULL);
1107
1108 bitmap_copy (tmp, op2);
1109 bitmap_and_compl_into (tmp, dense_invalidated);
1110
1111 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
1112 {
1113 bitmap_clear_range (tmp,
1114 DF_REG_DEF_GET (df, regno)->begin,
1115 DF_REG_DEF_GET (df, regno)->n_refs);
1116 }
1117 bitmap_ior_into (op1, tmp);
1118 BITMAP_FREE (tmp);
1119 }
1120 else
1121 bitmap_ior_into (op1, op2);
1122}
1123
1124
1125/* Transfer function. */
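/* out = gen | (in - kill), where each regno in sparse_kill additionally
   removes its whole range of def ids from in.  */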
1126
1127static bool
1128df_rd_transfer_function (struct dataflow *dflow, int bb_index)
1129{
1130 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1131 unsigned int regno;
1132 bitmap_iterator bi;
1133 bitmap in = bb_info->in;
1134 bitmap out = bb_info->out;
1135 bitmap gen = bb_info->gen;
1136 bitmap kill = bb_info->kill;
1137 bitmap sparse_kill = bb_info->sparse_kill;
1138
1139 if (bitmap_empty_p (sparse_kill))
1140 return bitmap_ior_and_compl (out, gen, in, kill);
1141 else
1142 {
1143 struct df *df = dflow->df;
1144 bool changed = false;
1145 bitmap tmp = BITMAP_ALLOC (NULL);
1146 bitmap_copy (tmp, in);
1147 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
1148 {
1149 bitmap_clear_range (tmp,
1150 DF_REG_DEF_GET (df, regno)->begin,
1151 DF_REG_DEF_GET (df, regno)->n_refs);
1152 }
1153 bitmap_and_compl_into (tmp, kill);
1154 bitmap_ior_into (tmp, gen);
1155 changed = !bitmap_equal_p (tmp, out);
1156 if (changed)
1157 {
1158 BITMAP_FREE (out);
1159 bb_info->out = tmp;
1160 }
1161 else
1162 BITMAP_FREE (tmp);
1163 return changed;
1164 }
1165}
1166
1167
1168/* Free all storage associated with the problem. */
1169
1170static void
1171df_rd_free (struct dataflow *dflow)
1172{
1173 unsigned int i;
1174 struct df_rd_problem_data *problem_data =
1175 (struct df_rd_problem_data *) dflow->problem_data;
1176
1177 for (i = 0; i < dflow->block_info_size; i++)
1178 {
1179 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
1180 if (bb_info)
1181 {
1182 BITMAP_FREE (bb_info->kill);
1183 BITMAP_FREE (bb_info->sparse_kill);
1184 BITMAP_FREE (bb_info->gen);
1185 BITMAP_FREE (bb_info->in);
1186 BITMAP_FREE (bb_info->out);
1187 }
1188 }
1189
1190 free_alloc_pool (dflow->block_pool);
1191
1192 for (i = 0; i < problem_data->def_sites_size; i++)
1193 {
1194 bitmap bm = problem_data->def_sites[i];
1195 if (bm)
1196 BITMAP_FREE (bm);
1197 }
1198
1199 free (problem_data->def_sites);
1200 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
1201 BITMAP_FREE (problem_data->dense_invalidated_by_call);
1202
1203 dflow->block_info_size = 0;
1204 free (dflow->block_info);
1205 free (dflow->problem_data);
1206 free (dflow);
1207}
1208
1209
1210/* Debugging info. */
1211
1212static void
1213df_rd_dump (struct dataflow *dflow, FILE *file)
1214{
1215 struct df *df = dflow->df;
1216 basic_block bb;
1217 struct df_rd_problem_data *problem_data =
1218 (struct df_rd_problem_data *) dflow->problem_data;
1219 unsigned int m = max_reg_num ();
1220 unsigned int regno;
1221
1222 fprintf (file, "Reaching defs:\n\n");
1223
1224 fprintf (file, " sparse invalidated \t");
1225 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
1226 fprintf (file, " dense invalidated \t");
1227 dump_bitmap (file, problem_data->dense_invalidated_by_call);
1228
1229 for (regno = 0; regno < m; regno++)
1230 if (DF_REG_DEF_GET (df, regno)->n_refs)
1231 fprintf (file, "%d[%d,%d] ", regno,
1232 DF_REG_DEF_GET (df, regno)->begin,
1233 DF_REG_DEF_GET (df, regno)->n_refs);
1234 fprintf (file, "\n");
1235
1236 FOR_ALL_BB (bb)
1237 {
1238 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
1239 df_print_bb_index (bb, file);
1240
1241 if (! bb_info->in)
1242 continue;
1243
1244 fprintf (file, " in\t(%d)\n", (int) bitmap_count_bits (bb_info->in));
1245 dump_bitmap (file, bb_info->in);
1246 fprintf (file, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
1247 dump_bitmap (file, bb_info->gen);
1248 fprintf (file, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
1249 dump_bitmap (file, bb_info->kill);
1250 fprintf (file, " out\t(%d)\n", (int) bitmap_count_bits (bb_info->out));
1251 dump_bitmap (file, bb_info->out);
1252 }
1253}
1254
1255/* All of the information associated with every instance of the problem. */
1256
1257static struct df_problem problem_RD =
1258{
1259 DF_RD, /* Problem id. */
1260 DF_FORWARD, /* Direction. */
1261 df_rd_alloc, /* Allocate the problem specific data. */
1262 df_rd_free_bb_info, /* Free basic block info. */
1263 df_rd_local_compute, /* Local compute function. */
1264 df_rd_init_solution, /* Init the solution specific data. */
1265 df_iterative_dataflow, /* Iterative solver. */
1266 NULL, /* Confluence operator 0. */
1267 df_rd_confluence_n, /* Confluence operator n. */
1268 df_rd_transfer_function, /* Transfer function. */
1269 NULL, /* Finalize function. */
1270 df_rd_free, /* Free all of the problem information. */
1271 df_rd_dump, /* Debugging. */
1272 NULL /* Dependent problem. */
1273};
1274
1275
1276
1277/* Create a new DATAFLOW instance and add it to an existing instance
1278 of DF. The returned structure is what is used to get at the
1279 solution. */
1280
1281struct dataflow *
1282df_rd_add_problem (struct df *df)
1283{
1284 return df_add_problem (df, &problem_RD);
1285}
1286
1287
1288\f
1289/*----------------------------------------------------------------------------
1290 LIVE REGISTERS
1291
1292 Find the locations in the function where any use of a pseudo can reach
1293 in the backwards direction.
1294----------------------------------------------------------------------------*/
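/* The bit vectors for this problem are indexed by regno:

     use - registers used in the block before being defined there,
     def - registers defined in the block,
     in  - registers live at the start of the block,
     out - registers live at the end of the block,

   and liveness is propagated in the backward direction.  */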
1295
1296/* Get basic block info. */
1297
1298struct df_lr_bb_info *
1299df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
1300{
1301 return (struct df_lr_bb_info *) dflow->block_info[index];
1302}
1303
1304
1305/* Set basic block info. */
1306
1307static void
1308df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
1309 struct df_lr_bb_info *bb_info)
1310{
1311 dflow->block_info[index] = bb_info;
1312}
1313
1314
1315/* Free basic block info. */
1316
1317static void
1318df_lr_free_bb_info (struct dataflow *dflow, void *vbb_info)
1319{
1320 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
1321 if (bb_info)
1322 {
1323 BITMAP_FREE (bb_info->use);
1324 BITMAP_FREE (bb_info->def);
1325 BITMAP_FREE (bb_info->in);
1326 BITMAP_FREE (bb_info->out);
1327 pool_free (dflow->block_pool, bb_info);
1328 }
1329}
1330
1331
1332/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1333 not touched unless the block is new. */
1334
1335static void
1336df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1337{
1338 unsigned int bb_index;
1339 bitmap_iterator bi;
1340
1341 if (! dflow->block_pool)
1342 dflow->block_pool = create_alloc_pool ("df_lr_block pool",
1343 sizeof (struct df_lr_bb_info), 50);
1344
1345 df_grow_bb_info (dflow);
1346
1347 /* Because of the clustering of all def sites for the same pseudo,
1348 we have to process all of the blocks before doing the
1349 analysis. */
1350
1351 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1352 {
1353 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1354 if (bb_info)
1355 {
1356 bitmap_clear (bb_info->def);
1357 bitmap_clear (bb_info->use);
1358 }
1359 else
1360 {
1361 bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
1362 df_lr_set_bb_info (dflow, bb_index, bb_info);
1363 bb_info->use = BITMAP_ALLOC (NULL);
1364 bb_info->def = BITMAP_ALLOC (NULL);
1365 bb_info->in = BITMAP_ALLOC (NULL);
1366 bb_info->out = BITMAP_ALLOC (NULL);
1367 }
1368 }
1369}
1370
1371
1372/* Compute local live register info for basic block BB. */
1373
1374static void
1375df_lr_bb_local_compute (struct dataflow *dflow,
1376 struct df *df, unsigned int bb_index)
1377{
1378 basic_block bb = BASIC_BLOCK (bb_index);
1379 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1380 rtx insn;
1381 struct df_ref *def;
1382 struct df_ref *use;
1383
1384 /* Process the hardware registers that are always live. */
1385 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1386 /* Add use to set of uses in this BB. */
1387 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
1388 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1389
1390 FOR_BB_INSNS_REVERSE (bb, insn)
1391 {
1392 unsigned int uid = INSN_UID (insn);
1393
1394 if (! INSN_P (insn))
1395 continue;
1396
1397 if (CALL_P (insn))
1398 {
1399 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1400 {
1401 unsigned int dregno = DF_REF_REGNO (def);
1402
1403 if (dregno >= FIRST_PSEUDO_REGISTER
1404 || !(SIBLING_CALL_P (insn)
1405 && bitmap_bit_p (df->exit_block_uses, dregno)
1406 && !refers_to_regno_p (dregno, dregno+1,
1407 current_function_return_rtx,
1408 (rtx *)0)))
1409 {
1410 /* Add def to set of defs in this BB. */
1411 bitmap_set_bit (bb_info->def, dregno);
1412 bitmap_clear_bit (bb_info->use, dregno);
1413 }
1414 }
1415 }
1416 else
1417 {
1418 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1419 {
1420 unsigned int dregno = DF_REF_REGNO (def);
1421
1422 if (DF_INSN_CONTAINS_ASM (df, insn)
1423 && dregno < FIRST_PSEUDO_REGISTER)
1424 {
1425 unsigned int i;
1426 unsigned int end =
1427 dregno + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
1428 for (i = dregno; i <= end; ++i)
1429 regs_asm_clobbered[i] = 1;
1430 }
1431 /* Add def to set of defs in this BB. */
1432 bitmap_set_bit (bb_info->def, dregno);
1433 bitmap_clear_bit (bb_info->use, dregno);
1434 }
1435 }
1436
1437 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
1438 /* Add use to set of uses in this BB. */
1439 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1440 }
1441
1442 /* Process the registers set in an exception handler. */
1443 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1444 {
1445 unsigned int dregno = DF_REF_REGNO (def);
1446 bitmap_set_bit (bb_info->def, dregno);
1447 bitmap_clear_bit (bb_info->use, dregno);
1448 }
1449
1450#ifdef EH_USES
1451 /* Process the uses that are live into an exception handler. */
1452 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1453 /* Add use to set of uses in this BB. */
1454 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
1455 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1456#endif
1457}
1458
1459/* Compute local live register info for each basic block within BLOCKS. */
1460
1461static void
1462df_lr_local_compute (struct dataflow *dflow,
1463 bitmap all_blocks,
1464 bitmap rescan_blocks)
1465{
1466 struct df *df = dflow->df;
1467 unsigned int bb_index;
1468 bitmap_iterator bi;
1469
1470 /* If we are rescanning everything, start with a clean set of
1471 registers clobbered by asm statements. */
1472 if (bitmap_equal_p (all_blocks, rescan_blocks))
1473 memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
1474
1475 bitmap_clear (df->hardware_regs_used);
1476
1477 /* The all-important stack pointer must always be live. */
1478 bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
1479
1480 /* Before reload, there are a few registers that must be forced
1481 live everywhere -- which might not already be the case for
1482 blocks within infinite loops. */
1483 if (! reload_completed)
1484 {
1485 /* Any reference to any pseudo before reload is a potential
1486 reference of the frame pointer. */
1487 bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
1488
1489#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1490 /* Pseudos with argument area equivalences may require
1491 reloading via the argument pointer. */
1492 if (fixed_regs[ARG_POINTER_REGNUM])
1493 bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
1494#endif
1495
1496 /* Any constant, or pseudo with constant equivalences, may
1497 require reloading from memory using the pic register. */
1498 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1499 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1500 bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
1501 }
1502
1503 if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
1504 {
1505 /* The exit block is special for this problem and its bits are
1506 computed from thin air. */
1507 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
1508 bitmap_copy (bb_info->use, df->exit_block_uses);
1509 }
1510
1511 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1512 {
1513 if (bb_index == EXIT_BLOCK)
1514 continue;
1515 df_lr_bb_local_compute (dflow, df, bb_index);
1516 }
1517}
1518
1519
1520/* Initialize the solution vectors. */
1521
1522static void
1523df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1524{
1525 unsigned int bb_index;
1526 bitmap_iterator bi;
1527
1528 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1529 {
1530 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1531 bitmap_copy (bb_info->in, bb_info->use);
1532 bitmap_clear (bb_info->out);
1533 }
1534}
1535
1536
1537/* Confluence function that processes infinite loops. This might be a
1538 noreturn function that throws. And even if it isn't, getting the
1539 unwind info right helps debugging. */
1540static void
1541df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
1542{
1543 struct df *df = dflow->df;
1544
1545 bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
1546 if (bb != EXIT_BLOCK_PTR)
1547 bitmap_copy (op1, df->hardware_regs_used);
1548}
1549
1550
1551/* Confluence function that ignores fake edges. */
1552static void
1553df_lr_confluence_n (struct dataflow *dflow, edge e)
1554{
1555 bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
1556 bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
1557
1558 /* Call-clobbered registers die across exception and call edges. */
1559 /* ??? Abnormal call edges ignored for the moment, as this gets
1560 confused by sibling call edges, which crashes reg-stack. */
1561 if (e->flags & EDGE_EH)
1562 bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
1563 else
1564 bitmap_ior_into (op1, op2);
1565
1566 bitmap_ior_into (op1, dflow->df->hardware_regs_used);
1567}
1568
1569
1570/* Transfer function. */
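/* in = use | (out - def).  */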
1571static bool
1572df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1573{
1574 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1575 bitmap in = bb_info->in;
1576 bitmap out = bb_info->out;
1577 bitmap use = bb_info->use;
1578 bitmap def = bb_info->def;
1579
1580 return bitmap_ior_and_compl (in, use, out, def);
1581}
1582
1583
1584/* Free all storage associated with the problem. */
1585
1586static void
1587df_lr_free (struct dataflow *dflow)
1588{
1589 unsigned int i;
1590 for (i = 0; i < dflow->block_info_size; i++)
1591 {
1592 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
1593 if (bb_info)
1594 {
1595 BITMAP_FREE (bb_info->use);
1596 BITMAP_FREE (bb_info->def);
1597 BITMAP_FREE (bb_info->in);
1598 BITMAP_FREE (bb_info->out);
1599 }
1600 }
1601 free_alloc_pool (dflow->block_pool);
1602
1603 dflow->block_info_size = 0;
1604 free (dflow->block_info);
1605 free (dflow);
1606}
1607
1608
1609/* Debugging info. */
1610
1611static void
1612df_lr_dump (struct dataflow *dflow, FILE *file)
1613{
1614 basic_block bb;
1615
1616 fprintf (file, "Live Registers:\n");
1617 FOR_ALL_BB (bb)
1618 {
1619 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
1620 df_print_bb_index (bb, file);
1621
1622 if (!bb_info->in)
1623 continue;
1624
1625 fprintf (file, " in \t");
1626 dump_bitmap (file, bb_info->in);
1627 fprintf (file, " use \t");
1628 dump_bitmap (file, bb_info->use);
1629 fprintf (file, " def \t");
1630 dump_bitmap (file, bb_info->def);
1631 fprintf (file, " out \t");
1632 dump_bitmap (file, bb_info->out);
1633 }
1634}
1635
1636/* All of the information associated with every instance of the problem. */
1637
1638static struct df_problem problem_LR =
1639{
1640 DF_LR, /* Problem id. */
1641 DF_BACKWARD, /* Direction. */
1642 df_lr_alloc, /* Allocate the problem specific data. */
1643 df_lr_free_bb_info, /* Free basic block info. */
1644 df_lr_local_compute, /* Local compute function. */
1645 df_lr_init, /* Init the solution specific data. */
1646 df_iterative_dataflow, /* Iterative solver. */
1647 df_lr_confluence_0, /* Confluence operator 0. */
1648 df_lr_confluence_n, /* Confluence operator n. */
1649 df_lr_transfer_function, /* Transfer function. */
1650 NULL, /* Finalize function. */
1651 df_lr_free, /* Free all of the problem information. */
1652 df_lr_dump, /* Debugging. */
1653 NULL /* Dependent problem. */
1654};
1655
1656
1657/* Create a new DATAFLOW instance and add it to an existing instance
1658 of DF. The returned structure is what is used to get at the
1659 solution. */
1660
1661struct dataflow *
1662df_lr_add_problem (struct df *df)
1663{
1664 return df_add_problem (df, &problem_LR);
1665}
1666
1667
1668\f
1669/*----------------------------------------------------------------------------
1670 UNINITIALIZED REGISTERS
1671
1672 Find the set of uses for registers that are reachable from the entry
1673 block without passing thru a definition.
1674----------------------------------------------------------------------------*/
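/* The bit vectors for this problem are indexed by regno:

     gen  - registers whose last definition in the block is a real set,
     kill - registers whose last definition in the block is a clobber,
     in   - registers that may have been set on some path from the entry
            to the start of the block,
     out  - the same property at the end of the block,

   and the problem is solved in the forward direction.  */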
1675
1676/* Get basic block info. */
1677
1678struct df_ur_bb_info *
1679df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1680{
1681 return (struct df_ur_bb_info *) dflow->block_info[index];
1682}
1683
1684
1685/* Set basic block info. */
1686
1687static void
1688df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
1689 struct df_ur_bb_info *bb_info)
1690{
1691 dflow->block_info[index] = bb_info;
1692}
1693
1694
1695/* Free basic block info. */
1696
1697static void
1698df_ur_free_bb_info (struct dataflow *dflow, void *vbb_info)
1699{
1700 struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1701 if (bb_info)
1702 {
1703 BITMAP_FREE (bb_info->gen);
1704 BITMAP_FREE (bb_info->kill);
1705 BITMAP_FREE (bb_info->in);
1706 BITMAP_FREE (bb_info->out);
1707 pool_free (dflow->block_pool, bb_info);
1708 }
1709}
1710
1711
1712/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1713 not touched unless the block is new. */
1714
1715static void
1716df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1717{
1718 unsigned int bb_index;
1719 bitmap_iterator bi;
1720
1721 if (! dflow->block_pool)
1722 dflow->block_pool = create_alloc_pool ("df_ur_block pool",
1723 sizeof (struct df_ur_bb_info), 100);
1724
1725 df_grow_bb_info (dflow);
1726
1727 /* Because of the clustering of all def sites for the same pseudo,
1728 we have to process all of the blocks before doing the
1729 analysis. */
1730
1731 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1732 {
1733 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1734 if (bb_info)
1735 {
1736 bitmap_clear (bb_info->kill);
1737 bitmap_clear (bb_info->gen);
1738 }
1739 else
1740 {
1741 bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
1742 df_ur_set_bb_info (dflow, bb_index, bb_info);
1743 bb_info->kill = BITMAP_ALLOC (NULL);
1744 bb_info->gen = BITMAP_ALLOC (NULL);
1745 bb_info->in = BITMAP_ALLOC (NULL);
1746 bb_info->out = BITMAP_ALLOC (NULL);
1747 }
1748 }
1749}
1750
1751
1752/* Compute local uninitialized register info for basic block BB. */
1753
1754static void
1755df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1756{
1757 struct df *df = dflow->df;
1758 basic_block bb = BASIC_BLOCK (bb_index);
1759 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1760 rtx insn;
1761 struct df_ref *def;
1762
1763 bitmap_clear (seen_in_block);
1764 bitmap_clear (seen_in_insn);
1765
1766 FOR_BB_INSNS_REVERSE (bb, insn)
1767 {
1768 unsigned int uid = INSN_UID (insn);
1769 if (!INSN_P (insn))
1770 continue;
1771
1772 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1773 {
1774 unsigned int regno = DF_REF_REGNO (def);
1775 /* Only the last def counts. */
1776 if (!bitmap_bit_p (seen_in_block, regno))
1777 {
1778 bitmap_set_bit (seen_in_insn, regno);
1779
1780 if (DF_REF_FLAGS (def) & DF_REF_CLOBBER)
1781 bitmap_set_bit (bb_info->kill, regno);
1782 else
1783 bitmap_set_bit (bb_info->gen, regno);
1784 }
1785 }
1786 bitmap_ior_into (seen_in_block, seen_in_insn);
1787 bitmap_clear (seen_in_insn);
1788 }
1789
1790 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1791 {
1792 unsigned int regno = DF_REF_REGNO (def);
1793 if (!bitmap_bit_p (seen_in_block, regno))
1794 {
1795 bitmap_set_bit (seen_in_block, regno);
1796 bitmap_set_bit (bb_info->gen, regno);
1797 }
1798 }
1799}
1800
1801
1802/* Compute local uninitialized register info. */
1803
1804static void
1805df_ur_local_compute (struct dataflow *dflow,
1806 bitmap all_blocks ATTRIBUTE_UNUSED,
1807 bitmap rescan_blocks)
1808{
1809 unsigned int bb_index;
1810 bitmap_iterator bi;
1811
1812 df_set_seen ();
1813
1814 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1815 {
1816 df_ur_bb_local_compute (dflow, bb_index);
1817 }
1818
1819 df_unset_seen ();
1820}
1821
1822
1823/* Initialize the solution vectors. */
1824
1825static void
1826df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1827{
1828 unsigned int bb_index;
1829 bitmap_iterator bi;
1830
1831 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1832 {
1833 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1834
1835 bitmap_copy (bb_info->out, bb_info->gen);
1836 bitmap_clear (bb_info->in);
1837 }
1838}
1839
1840
1841/* Or in the stack regs, hard regs and early clobber regs into the
1842 ur_in sets of all of the blocks. */
1843
1844static void
1845df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
1846{
1847 struct df *df = dflow->df;
1848 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
1849 bitmap tmp = BITMAP_ALLOC (NULL);
1850 bitmap_iterator bi;
1851 unsigned int bb_index;
1852
1853 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1854 {
1855 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1856 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
1857
1858 bitmap_ior_into (bb_info->in, df_all_hard_regs);
1859 bitmap_ior_into (bb_info->out, df_all_hard_regs);
1860
1861 /* No register may reach a location where it is not used. Thus
1862 we trim the ur result to the places where it is used. */
1863 bitmap_and_into (bb_info->in, bb_lr_info->in);
1864 bitmap_and_into (bb_info->out, bb_lr_info->out);
1865
1866#if 1
1867 /* Hard registers may still stick in the ur_out set, but not
1868 be in the ur_in set, if their only mention was in a call
1869 in this block. This is because a call kills in the lr
1870 problem but does not kill in the ur problem. To clean
1871 this up, we execute the transfer function on the lr_in
1872 set and then use that to knock bits out of ur_out. */
1873 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
1874 bb_info->kill);
1875 bitmap_and_into (bb_info->out, tmp);
1876#endif
1877 }
1878
1879 BITMAP_FREE (tmp);
1880}
1881
1882
1883/* Confluence function that ignores fake edges. */
1884
1885static void
1886df_ur_confluence_n (struct dataflow *dflow, edge e)
1887{
1888 bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
1889 bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
1890
1891 if (e->flags & EDGE_FAKE)
1892 return;
1893
1894 bitmap_ior_into (op1, op2);
1895}
1896
1897
1898/* Transfer function. */
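/* out = gen | (in - kill).  */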
1899
1900static bool
1901df_ur_transfer_function (struct dataflow *dflow, int bb_index)
1902{
1903 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1904 bitmap in = bb_info->in;
1905 bitmap out = bb_info->out;
1906 bitmap gen = bb_info->gen;
1907 bitmap kill = bb_info->kill;
1908
1909 return bitmap_ior_and_compl (out, gen, in, kill);
1910}
1911
1912
1913/* Free all storage associated with the problem. */
1914
1915static void
1916df_ur_free (struct dataflow *dflow)
1917{
1918 unsigned int i;
1919
1920 for (i = 0; i < dflow->block_info_size; i++)
1921 {
1922 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
1923 if (bb_info)
1924 {
1925 BITMAP_FREE (bb_info->gen);
1926 BITMAP_FREE (bb_info->kill);
1927 BITMAP_FREE (bb_info->in);
1928 BITMAP_FREE (bb_info->out);
1929 }
1930 }
1931
1932 free_alloc_pool (dflow->block_pool);
1933 dflow->block_info_size = 0;
1934 free (dflow->block_info);
1935 free (dflow);
1936}
1937
1938
1939/* Debugging info. */
1940
1941static void
1942df_ur_dump (struct dataflow *dflow, FILE *file)
1943{
1944 basic_block bb;
1945
1946 fprintf (file, "Undefined regs:\n");
1947
1948 FOR_ALL_BB (bb)
1949 {
1950 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
1951 df_print_bb_index (bb, file);
1952
1953 if (! bb_info->in)
1954 continue;
1955
1956 fprintf (file, " in \t");
1957 dump_bitmap (file, bb_info->in);
1958 fprintf (file, " gen \t");
1959 dump_bitmap (file, bb_info->gen);
1960 fprintf (file, " kill\t");
1961 dump_bitmap (file, bb_info->kill);
1962 fprintf (file, " out \t");
1963 dump_bitmap (file, bb_info->out);
1964 }
1965}
1966
1967/* All of the information associated with every instance of the problem. */
1968
1969static struct df_problem problem_UR =
1970{
1971 DF_UR, /* Problem id. */
1972 DF_FORWARD, /* Direction. */
1973 df_ur_alloc, /* Allocate the problem specific data. */
1974 df_ur_free_bb_info, /* Free basic block info. */
1975 df_ur_local_compute, /* Local compute function. */
1976 df_ur_init, /* Init the solution specific data. */
1977 df_iterative_dataflow, /* Iterative solver. */
1978 NULL, /* Confluence operator 0. */
1979 df_ur_confluence_n, /* Confluence operator n. */
1980 df_ur_transfer_function, /* Transfer function. */
1981 df_ur_local_finalize, /* Finalize function. */
1982 df_ur_free, /* Free all of the problem information. */
1983 df_ur_dump, /* Debugging. */
1984 &problem_LR /* Dependent problem. */
1985};
1986
1987
1988/* Create a new DATAFLOW instance and add it to an existing instance
1989 of DF. The returned structure is what is used to get at the
1990 solution. */
1991
1992struct dataflow *
1993df_ur_add_problem (struct df *df)
1994{
1995 return df_add_problem (df, &problem_UR);
1996}
1997
1998
1999\f
2000/*----------------------------------------------------------------------------
2001 UNINITIALIZED REGISTERS WITH EARLYCLOBBER
2002
2003 Find the set of uses for registers that are reachable from the entry
2004 block without passing through a definition.
2005
2006 This is a variant of the UR problem above that has a lot of special
2007 features just for the register allocation phase.
2008----------------------------------------------------------------------------*/
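
/* A rough map of the pieces below: the local pass records GEN, KILL
   and EARLYCLOBBER bits for each block; the solver then propagates
   the solution forward (IN is the union of the predecessors' OUT
   over non-fake edges, OUT = GEN | (IN & ~KILL)); and the finalizer
   ors the earlyclobber and stack register bits back into the
   solution before trimming it with the LR (live register) results.  */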
2009
2010struct df_urec_problem_data
2011{
2012 bool earlyclobbers_found; /* True if any instruction contains an
2013 earlyclobber. */
2014#ifdef STACK_REGS
2015 bitmap stack_regs; /* Registers that may be allocated to a stack register. */
2016#endif
2017};
2018
2019
2020/* Get basic block info. */
2021
2022struct df_urec_bb_info *
2023df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2024{
2025 return (struct df_urec_bb_info *) dflow->block_info[index];
2026}
2027
2028
2029/* Set basic block info. */
2030
2031static void
2032df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
2033 struct df_urec_bb_info *bb_info)
2034{
2035 dflow->block_info[index] = bb_info;
2036}
2037
2038
2039/* Free basic block info. */
2040
2041static void
2042df_urec_free_bb_info (struct dataflow *dflow, void *vbb_info)
2043{
2044 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2045 if (bb_info)
2046 {
2047 BITMAP_FREE (bb_info->gen);
2048 BITMAP_FREE (bb_info->kill);
2049 BITMAP_FREE (bb_info->in);
2050 BITMAP_FREE (bb_info->out);
2051 BITMAP_FREE (bb_info->earlyclobber);
2052 pool_free (dflow->block_pool, bb_info);
2053 }
2054}
2055
2056
2057/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
2058 not touched unless the block is new. */
2059
2060static void
2061df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
2062{
2063 unsigned int bb_index;
2064 bitmap_iterator bi;
2065 struct df_urec_problem_data *problem_data =
2066 (struct df_urec_problem_data *) dflow->problem_data;
2067
2068 if (! dflow->block_pool)
2069 dflow->block_pool = create_alloc_pool ("df_urec_block pool",
2070 sizeof (struct df_urec_bb_info), 50);
2071
2072 if (!dflow->problem_data)
2073 {
2074 problem_data = xmalloc (sizeof (struct df_urec_problem_data));
2075 dflow->problem_data = problem_data;
2076 }
2077 problem_data->earlyclobbers_found = false;
2078
2079 df_grow_bb_info (dflow);
2080
2081 /* Because of the clustering of all def sites for the same pseudo,
2082 we have to process all of the blocks before doing the
2083 analysis. */
2084
2085 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
2086 {
2087 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2088 if (bb_info)
2089 {
2090 bitmap_clear (bb_info->kill);
2091 bitmap_clear (bb_info->gen);
2092 bitmap_clear (bb_info->earlyclobber);
2093 }
2094 else
2095 {
2096 bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
2097 df_urec_set_bb_info (dflow, bb_index, bb_info);
2098 bb_info->kill = BITMAP_ALLOC (NULL);
2099 bb_info->gen = BITMAP_ALLOC (NULL);
2100 bb_info->in = BITMAP_ALLOC (NULL);
2101 bb_info->out = BITMAP_ALLOC (NULL);
2102 bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2103 }
2104 }
2105}
2106
2107
2108/* The function modifies local info for register REG being changed in
2109 SETTER. DATA is used to pass the current basic block info. */
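/* This is the callback handed to note_stores in
   df_urec_bb_local_compute.  SETTER is the SET or CLOBBER rtx; a
   CLOBBER still records the kill but removes the register from GEN.  */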
2110
2111static void
2112df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
2113{
2114 int regno;
2115 int endregno;
2116 int i;
2117 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2118
2119 if (GET_CODE (reg) == SUBREG)
2120 reg = SUBREG_REG (reg);
2121
2122 if (!REG_P (reg))
2123 return;
2124
2125
2126 endregno = regno = REGNO (reg);
2127 if (regno < FIRST_PSEUDO_REGISTER)
2128 {
2129 endregno += hard_regno_nregs[regno][GET_MODE (reg)];
2130
2131 for (i = regno; i < endregno; i++)
2132 {
2133 bitmap_set_bit (bb_info->kill, i);
2134
2135 if (GET_CODE (setter) != CLOBBER)
2136 bitmap_set_bit (bb_info->gen, i);
2137 else
2138 bitmap_clear_bit (bb_info->gen, i);
2139 }
2140 }
2141 else
2142 {
2143 bitmap_set_bit (bb_info->kill, regno);
2144
2145 if (GET_CODE (setter) != CLOBBER)
2146 bitmap_set_bit (bb_info->gen, regno);
2147 else
2148 bitmap_clear_bit (bb_info->gen, regno);
2149 }
2150}
2151/* Classes of registers which could be early clobbered in the current
2152 insn. */
2153
2154DEF_VEC_I(int);
2155DEF_VEC_ALLOC_I(int,heap);
2156
2157static VEC(int,heap) *earlyclobber_regclass;
2158
2159/* This function finds and stores register classes that could be early
2160 clobbered in INSN. If any earlyclobber classes are found, the function
2161 returns TRUE; otherwise it returns FALSE. */
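/* For example, an operand whose constraint string is "=&r" is an
   early clobbered output in GENERAL_REGS, so GENERAL_REGS would be
   pushed onto earlyclobber_regclass.  */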
2162
2163static bool
2164df_urec_check_earlyclobber (rtx insn)
2165{
2166 int opno;
2167 bool found = false;
2168
2169 extract_insn (insn);
2170
2171 VEC_truncate (int, earlyclobber_regclass, 0);
2172 for (opno = 0; opno < recog_data.n_operands; opno++)
2173 {
2174 char c;
2175 bool amp_p;
2176 int i;
2177 enum reg_class class;
2178 const char *p = recog_data.constraints[opno];
2179
2180 class = NO_REGS;
2181 amp_p = false;
2182 for (;;)
2183 {
2184 c = *p;
2185 switch (c)
2186 {
2187 case '=': case '+': case '?':
2188 case '#': case '!':
2189 case '*': case '%':
2190 case 'm': case '<': case '>': case 'V': case 'o':
2191 case 'E': case 'F': case 'G': case 'H':
2192 case 's': case 'i': case 'n':
2193 case 'I': case 'J': case 'K': case 'L':
2194 case 'M': case 'N': case 'O': case 'P':
2195 case 'X':
2196 case '0': case '1': case '2': case '3': case '4':
2197 case '5': case '6': case '7': case '8': case '9':
2198 /* These don't say anything we care about. */
2199 break;
2200
2201 case '&':
2202 amp_p = true;
2203 break;
2204 case '\0':
2205 case ',':
2206 if (amp_p && class != NO_REGS)
2207 {
2208 int rc;
2209
2210 found = true;
2211 for (i = 0;
2212 VEC_iterate (int, earlyclobber_regclass, i, rc);
2213 i++)
2214 {
2215 if (rc == (int) class)
2216 goto found_rc;
2217 }
2218
2219 /* We use VEC_quick_push here because
2220 earlyclobber_regclass holds no more than
2221 N_REG_CLASSES elements. */
2222 VEC_quick_push (int, earlyclobber_regclass, (int) class);
2223 found_rc:
2224 ;
2225 }
2226
2227 amp_p = false;
2228 class = NO_REGS;
2229 break;
2230
2231 case 'r':
2232 class = GENERAL_REGS;
2233 break;
2234
2235 default:
2236 class = REG_CLASS_FROM_CONSTRAINT (c, p);
2237 break;
2238 }
2239 if (c == '\0')
2240 break;
2241 p += CONSTRAINT_LEN (c, p);
2242 }
2243 }
2244
2245 return found;
2246}
2247
2248/* The function checks whether pseudo-register *X has a class
2249 intersecting with the class of a pseudo-register that could be
2250 early clobbered in the same insn.
2251
2252 This function is a no-op if earlyclobber_regclass is empty.
2253
2254 Reload can assign the same hard register to an uninitialized
2255 pseudo-register and an early clobbered pseudo-register in an insn
2256 if the pseudo-register is used for the first time in the given BB
2257 and is not live at the BB start. To prevent this we do not
2258 change the life information for such pseudo-registers. */
2259
2260static int
2261df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
2262{
2263 enum reg_class pref_class, alt_class;
2264 int i, regno;
2265 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2266
2267 if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2268 {
2269 int rc;
2270
2271 regno = REGNO (*x);
2272 if (bitmap_bit_p (bb_info->kill, regno)
2273 || bitmap_bit_p (bb_info->gen, regno))
2274 return 0;
2275 pref_class = reg_preferred_class (regno);
2276 alt_class = reg_alternate_class (regno);
2277 for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2278 {
2279 if (reg_classes_intersect_p (rc, pref_class)
2280 || (rc != NO_REGS
2281 && reg_classes_intersect_p (rc, alt_class)))
2282 {
2283 bitmap_set_bit (bb_info->earlyclobber, regno);
2284 break;
2285 }
2286 }
2287 }
2288 return 0;
2289}
2290
2291/* The function processes all pseudo-registers in *X with the aid of
2292 the previous function. */
2293
2294static void
2295df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2296{
2297 for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
2298}
2299
2300
2301/* Compute local uninitialized register info for basic block BB. */
2302
2303static void
2304df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
2305{
2306 struct df *df = dflow->df;
2307 basic_block bb = BASIC_BLOCK (bb_index);
2308 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2309 rtx insn;
2310 struct df_ref *def;
2311
2312 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2313 {
2314 unsigned int regno = DF_REF_REGNO (def);
2315 bitmap_set_bit (bb_info->gen, regno);
2316 }
2317
2318 FOR_BB_INSNS (bb, insn)
2319 {
2320 if (INSN_P (insn))
2321 {
2322 note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
2323 if (df_state & (DF_SCAN_GLOBAL | DF_SCAN_POST_ALLOC)
2324 && df_urec_check_earlyclobber (insn))
2325 {
2326 struct df_urec_problem_data *problem_data =
2327 (struct df_urec_problem_data *) dflow->problem_data;
2328 problem_data->earlyclobbers_found = true;
2329 note_uses (&PATTERN (insn),
2330 df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
2331 }
2332 }
2333 }
2334}
2335
2336
2337/* Compute local uninitialized register info. */
2338
2339static void
2340df_urec_local_compute (struct dataflow *dflow,
2341 bitmap all_blocks ATTRIBUTE_UNUSED,
2342 bitmap rescan_blocks)
2343{
2344 unsigned int bb_index;
2345 bitmap_iterator bi;
2346#ifdef STACK_REGS
2347 int i;
2348 HARD_REG_SET zero, stack_hard_regs, used;
2349 struct df_urec_problem_data *problem_data =
2350 (struct df_urec_problem_data *) dflow->problem_data;
2351
2352 /* Any register that MAY be allocated to a register stack (like the
2353 387) is treated poorly. Each such register is marked as being
2354 live everywhere. This keeps the register allocator and the
2355 subsequent passes from doing anything useful with these values.
2356
2357 FIXME: This seems like an incredibly poor idea. */
2358
2359 CLEAR_HARD_REG_SET (zero);
2360 CLEAR_HARD_REG_SET (stack_hard_regs);
2361 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
2362 SET_HARD_REG_BIT (stack_hard_regs, i);
2363 problem_data->stack_regs = BITMAP_ALLOC (NULL);
2364 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2365 {
2366 COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
2367 IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
2368 AND_HARD_REG_SET (used, stack_hard_regs);
2369 GO_IF_HARD_REG_EQUAL (used, zero, skip);
2370 bitmap_set_bit (problem_data->stack_regs, i);
2371 skip:
2372 ;
2373 }
2374#endif
2375
2376 /* We know that earlyclobber_regclass holds no more than
2377 N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
2378 earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2379
2380 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
2381 {
2382 df_urec_bb_local_compute (dflow, bb_index);
2383 }
2384
2385 VEC_free (int, heap, earlyclobber_regclass);
2386}
2387
2388
2389/* Initialize the solution vectors. */
2390
2391static void
2392df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2393{
2394 unsigned int bb_index;
2395 bitmap_iterator bi;
2396
2397 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2398 {
2399 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2400
2401 /* FIXME: This is a hack, it has been copied over from
2402 make_accurate_live_analysis by Vlad. Most likely it is necessary
2403 because the generation of gen and kill information for hardware
2404 registers in ur is a subset of what is really necessary and what
2405 is done for the lr problem. */
2406
2407 /* Inside the register allocator, partial availability is only
2408 allowed for the pseudo registers. To implement this, the rr is
2409 initially iored with a mask of ones for the hard registers and
2410 zeros for the pseudos before being iterated. This means that
2411 each hardware register will be live unless explicitly killed by
2412 some statement. Eventually most of these bits will die because
2413 the results of rr are anded with the results of lr before being
2414 used. Outside of register allocation, a more conservative
2415 strategy of completely ignoring the uninitialized registers is
2416 employed in the finalizer function. */
2417 if (df_state & DF_SCAN_GLOBAL)
2418 {
2419 bitmap_ior (bb_info->out, bb_info->gen, df_all_hard_regs);
2420 bitmap_copy (bb_info->in, df_all_hard_regs);
2421 }
2422 else
2423 {
2424 bitmap_copy (bb_info->out, bb_info->gen);
2425 bitmap_clear (bb_info->in);
2426 }
2427 }
2428}
2429
2430
2431/* Or in the stack regs, hard regs and early clobber regs into the
2432 ur_in sets of all of the blocks. */
2433
2434static void
2435df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
2436{
2437 struct df *df = dflow->df;
2438 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
2439 bitmap tmp = BITMAP_ALLOC (NULL);
2440 bitmap_iterator bi;
2441 unsigned int bb_index;
2442 struct df_urec_problem_data *problem_data =
2443 (struct df_urec_problem_data *) dflow->problem_data;
2444
2445 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2446 {
2447 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2448 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
2449
2450 if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
2451 {
2452 if (problem_data->earlyclobbers_found)
2453 bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
2454
2455#ifdef STACK_REGS
2456 /* We cannot use the same stack register for an uninitialized
2457 pseudo-register and another live pseudo-register, because
2458 if the uninitialized pseudo-register dies, the subsequent
2459 reg-stack pass will be confused (it will believe that the
2460 other register dies). */
2461 bitmap_ior_into (bb_info->in, problem_data->stack_regs);
2462 bitmap_ior_into (bb_info->out, problem_data->stack_regs);
2463#endif
2464 }
2465
2466 if (!(df_state & DF_SCAN_GLOBAL))
2467 {
2468 bitmap_ior_into (bb_info->in, df_all_hard_regs);
2469 bitmap_ior_into (bb_info->out, df_all_hard_regs);
2470 }
2471
2472 /* No register may reach a location where it is not used. Thus
2473 we trim the rr result to the places where it is used. */
2474 bitmap_and_into (bb_info->in, bb_lr_info->in);
2475 bitmap_and_into (bb_info->out, bb_lr_info->out);
2476
2477#if 1
2478 /* Hard registers may still stick in the ur_out set, but not
2479 be in the ur_in set, if their only mention was in a call
2480 in this block. This is because a call kills in the lr
2481 problem but does not kill in the rr problem. To clean
2482 this up, we execute the transfer function on the lr_in
2483 set and then use that to knock bits out of ur_out. */
2484 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2485 bb_info->kill);
2486 bitmap_and_into (bb_info->out, tmp);
2487#endif
2488 }
2489
2490#ifdef STACK_REGS
2491 BITMAP_FREE (problem_data->stack_regs);
2492#endif
2493 BITMAP_FREE (tmp);
2494}
2495
2496
2497/* Confluence function that ignores fake edges. */
2498
2499static void
2500df_urec_confluence_n (struct dataflow *dflow, edge e)
2501{
2502 bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2503 bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2504
2505 if (e->flags & EDGE_FAKE)
2506 return;
2507
2508 bitmap_ior_into (op1, op2);
2509}
2510
2511
2512/* Transfer function. */
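/* Identical in form to the ur transfer function:
   OUT = GEN | (IN & ~KILL).  */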
2513
2514static bool
2515df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2516{
2517 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2518 bitmap in = bb_info->in;
2519 bitmap out = bb_info->out;
2520 bitmap gen = bb_info->gen;
2521 bitmap kill = bb_info->kill;
2522
2523 return bitmap_ior_and_compl (out, gen, in, kill);
2524}
2525
2526
2527/* Free all storage associated with the problem. */
2528
2529static void
2530df_urec_free (struct dataflow *dflow)
2531{
2532 unsigned int i;
2533
2534 for (i = 0; i < dflow->block_info_size; i++)
2535 {
2536 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
2537 if (bb_info)
2538 {
2539 BITMAP_FREE (bb_info->gen);
2540 BITMAP_FREE (bb_info->kill);
2541 BITMAP_FREE (bb_info->in);
2542 BITMAP_FREE (bb_info->out);
2543 BITMAP_FREE (bb_info->earlyclobber);
2544 }
2545 }
2546
2547 free_alloc_pool (dflow->block_pool);
2548
2549 dflow->block_info_size = 0;
2550 free (dflow->block_info);
2551 free (dflow->problem_data);
2552 free (dflow);
2553}
2554
2555
2556/* Debugging info. */
2557
2558static void
2559df_urec_dump (struct dataflow *dflow, FILE *file)
2560{
2561 basic_block bb;
2562
2563 fprintf (file, "Undefined regs:\n");
2564
2565 FOR_ALL_BB (bb)
2566 {
2567 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
2568 df_print_bb_index (bb, file);
2569
2570 if (! bb_info->in)
2571 continue;
2572
2573 fprintf (file, " in \t");
2574 dump_bitmap (file, bb_info->in);
2575 fprintf (file, " gen \t");
2576 dump_bitmap (file, bb_info->gen);
2577 fprintf (file, " kill\t");
2578 dump_bitmap (file, bb_info->kill);
2579 fprintf (file, " ec\t");
2580 dump_bitmap (file, bb_info->earlyclobber);
2581 fprintf (file, " out \t");
2582 dump_bitmap (file, bb_info->out);
2583 }
2584}
2585
2586/* All of the information associated with every instance of the problem. */
2587
2588static struct df_problem problem_UREC =
2589{
2590 DF_UREC, /* Problem id. */
2591 DF_FORWARD, /* Direction. */
2592 df_urec_alloc, /* Allocate the problem specific data. */
2593 df_urec_free_bb_info, /* Free basic block info. */
2594 df_urec_local_compute, /* Local compute function. */
2595 df_urec_init, /* Init the solution specific data. */
2596 df_iterative_dataflow, /* Iterative solver. */
2597 NULL, /* Confluence operator 0. */
2598 df_urec_confluence_n, /* Confluence operator n. */
2599 df_urec_transfer_function, /* Transfer function. */
2600 df_urec_local_finalize, /* Finalize function. */
2601 df_urec_free, /* Free all of the problem information. */
2602 df_urec_dump, /* Debugging. */
2603 &problem_LR /* Dependent problem. */
2604};
2605
2606
2607/* Create a new DATAFLOW instance and add it to an existing instance
2608 of DF. The returned structure is what is used to get at the
2609 solution. */
2610
2611struct dataflow *
2612df_urec_add_problem (struct df *df)
2613{
2614 return df_add_problem (df, &problem_UREC);
2615}
2616
2617
2618\f
2619/*----------------------------------------------------------------------------
2620 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
2621
2622 Link either the defs to the uses and / or the uses to the defs.
2623
2624 These problems are set up like the other dataflow problems so that
2625 they nicely fit into the framework. They are much simpler and only
2626 involve a single traversal of instructions and an examination of
2627 the reaching defs information (the dependent problem).
2628----------------------------------------------------------------------------*/
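
/* A minimal sketch (assuming the DF_DU_CHAIN flag was passed to
   df_chain_add_problem) of how a consumer might walk the uses
   reached from a def once the chains have been built; process_use
   is a hypothetical placeholder:

     struct df_link *link;
     for (link = DF_REF_CHAIN (def); link; link = link->next)
       process_use (link->ref);  */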
2629
2630struct df_chain_problem_data
2631{
2632 int flags;
2633};
2634
2635
2636/* Create def-use or use-def chains. */
2637
2638static void
2639df_chain_alloc (struct dataflow *dflow,
2640 bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2641{
2642 struct df *df = dflow->df;
2643 unsigned int i;
2644 struct df_chain_problem_data *problem_data =
2645 (struct df_chain_problem_data *) dflow->problem_data;
2646
2647 /* Wholesale destruction of the old chains. */
2648 if (dflow->block_pool)
2649 free_alloc_pool (dflow->block_pool);
2650
2651 dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
2652 sizeof (struct df_link), 100);
2653
2654 if (problem_data->flags & DF_DU_CHAIN)
2655 {
2656 if (!df->def_info.refs_organized)
2657 df_reorganize_refs (&df->def_info);
2658
2659 /* Clear out the pointers from the refs. */
2660 for (i = 0; i < DF_DEFS_SIZE (df); i++)
2661 {
2662 struct df_ref *ref = df->def_info.refs[i];
2663 DF_REF_CHAIN (ref) = NULL;
2664 }
2665 }
2666
2667 if (problem_data->flags & DF_UD_CHAIN)
2668 {
2669 if (!df->use_info.refs_organized)
2670 df_reorganize_refs (&df->use_info);
2671 for (i = 0; i < DF_USES_SIZE (df); i++)
2672 {
2673 struct df_ref *ref = df->use_info.refs[i];
2674 DF_REF_CHAIN (ref) = NULL;
2675 }
2676 }
2677}
2678
2679
2680/* Create the chains for a list of USEs. */
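/* LOCAL_RD is indexed by def id, and the defs of register N occupy
   the contiguous id range [DF_REG_DEF_GET (df, N)->begin,
   begin + n_refs - 1]; that is why the bitmap walk below can be
   restricted to that window.  */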
2681
2682static void
2683df_chain_create_bb_process_use (struct dataflow *dflow,
2684 struct df_chain_problem_data *problem_data,
2685 bitmap local_rd,
2686 struct df_ref *use,
2687 enum df_ref_flags top_flag)
2688{
2689 struct df *df = dflow->df;
2690 bitmap_iterator bi;
2691 unsigned int def_index;
2692
2693 while (use)
2694 {
2695 /* We do not want to go through this for an uninitialized var. */
2696 unsigned int uregno = DF_REF_REGNO (use);
2697 int count = DF_REG_DEF_GET (df, uregno)->n_refs;
2698 if (count)
2699 {
2700 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2701 {
2702 unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
2703 unsigned int last_index = first_index + count - 1;
2704
2705 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2706 {
2707 struct df_ref *def;
2708 if (def_index > last_index)
2709 break;
2710
2711 def = DF_DEFS_GET (df, def_index);
2712 if (problem_data->flags & DF_DU_CHAIN)
2713 df_chain_create (dflow, def, use);
2714 if (problem_data->flags & DF_UD_CHAIN)
2715 df_chain_create (dflow, use, def);
2716 }
2717 }
2718 }
2719 use = use->next_ref;
2720 }
2721}
2722
2723/* Reset the storage pool that the def-use or use-def chains have been
2724 allocated in. We do not need to readjust the pointers in the refs;
2725 these have already been cleaned out. */
2726
2727/* Create chains from reaching defs bitmaps for basic block BB. */
2728static void
2729df_chain_create_bb (struct dataflow *dflow,
2730 struct dataflow *rd_dflow,
2731 unsigned int bb_index)
2732{
2733 basic_block bb = BASIC_BLOCK (bb_index);
2734 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
2735 rtx insn;
2736 bitmap cpy = BITMAP_ALLOC (NULL);
2737 struct df *df = dflow->df;
2738 struct df_chain_problem_data *problem_data =
2739 (struct df_chain_problem_data *) dflow->problem_data;
2740 struct df_ref *def;
2741
2742 bitmap_copy (cpy, bb_info->in);
2743
2744 /* Since we are going forwards, process the artificial uses first
2745 then the artificial defs second. */
2746
2747#ifdef EH_USES
2748 /* Create the chains for the artificial uses from the EH_USES at the
2749 beginning of the block. */
2750 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2751 df_get_artificial_uses (df, bb->index),
2752 DF_REF_AT_TOP);
2753#endif
2754
2755 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2756 {
2757 unsigned int dregno = DF_REF_REGNO (def);
2758 bitmap_clear_range (cpy,
2759 DF_REG_DEF_GET (df, dregno)->begin,
2760 DF_REG_DEF_GET (df, dregno)->n_refs);
2761 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2762 bitmap_set_bit (cpy, DF_REF_ID (def));
2763 }
2764
2765 /* Process the regular instructions next. */
2766 FOR_BB_INSNS (bb, insn)
2767 {
2768 struct df_ref *def;
2769 unsigned int uid = INSN_UID (insn);
2770
2771 if (! INSN_P (insn))
2772 continue;
2773
2774 /* Now scan the uses and link them up with the defs that remain
2775 in the cpy vector. */
2776
2777 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2778 DF_INSN_UID_GET (df, uid)->uses, 0);
2779
2780 /* Since we are going forwards, process the defs second. This
2781 pass only changes the bits in cpy. */
2782 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
2783 {
2784 unsigned int dregno = DF_REF_REGNO (def);
2785 bitmap_clear_range (cpy,
2786 DF_REG_DEF_GET (df, dregno)->begin,
2787 DF_REG_DEF_GET (df, dregno)->n_refs);
2788 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2789 bitmap_set_bit (cpy, DF_REF_ID (def));
2790 }
2791 }
2792
2793 /* Create the chains for the artificial uses of the hard registers
2794 at the end of the block. */
2795 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2796 df_get_artificial_uses (df, bb->index), 0);
2797}
2798
2799/* Create def-use and / or use-def chains from the reaching defs
2800 bitmaps for the basic blocks in ALL_BLOCKS. */
2801
2802static void
2803df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
2804{
2805 unsigned int bb_index;
2806 bitmap_iterator bi;
2807 struct df *df = dflow->df;
2808 struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
2809
2810 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2811 {
2812 df_chain_create_bb (dflow, rd_dflow, bb_index);
2813 }
2814}
2815
2816
2817/* Free all storage associated with the problem. */
2818
2819static void
2820df_chain_free (struct dataflow *dflow)
2821{
2822 free_alloc_pool (dflow->block_pool);
2823 free (dflow->problem_data);
2824 free (dflow);
2825}
2826
2827
2828/* Debugging info. */
2829
2830static void
2831df_chains_dump (struct dataflow *dflow, FILE *file)
2832{
2833 struct df *df = dflow->df;
2834 unsigned int j;
2835 struct df_chain_problem_data *problem_data =
2836 (struct df_chain_problem_data *) dflow->problem_data;
2837
2838 if (problem_data->flags & DF_DU_CHAIN)
2839 {
2840 fprintf (file, "Def-use chains:\n");
2841 for (j = 0; j < df->def_info.bitmap_size; j++)
2842 {
2843 struct df_ref *def = DF_DEFS_GET (df, j);
2844 if (def)
2845 {
2846 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
2847 j, DF_REF_BBNO (def),
2848 DF_INSN_LUID (df, DF_REF_INSN (def)),
2849 DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
2850 DF_REF_REGNO (def));
2851 if (def->flags & DF_REF_READ_WRITE)
2852 fprintf (file, "read/write ");
2853 df_chain_dump (df, DF_REF_CHAIN (def), file);
2854 fprintf (file, "\n");
2855 }
2856 }
2857 }
2858
2859 if (problem_data->flags & DF_UD_CHAIN)
2860 {
2861 fprintf (file, "Use-def chains:\n");
2862 for (j = 0; j < df->use_info.bitmap_size; j++)
2863 {
2864 struct df_ref *use = DF_USES_GET (df, j);
2865 if (use)
2866 {
2867 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
2868 j, DF_REF_BBNO (use),
2869 DF_REF_INSN (use) ?
2870 DF_INSN_LUID (df, DF_REF_INSN (use))
2871 : -1,
2872 DF_REF_INSN (DF_USES_GET (df, j)) ?
2873 DF_REF_INSN_UID (DF_USES_GET (df,j))
2874 : -1,
2875 DF_REF_REGNO (use));
2876 if (use->flags & DF_REF_READ_WRITE)
2877 fprintf (file, "read/write ");
2878 if (use->flags & DF_REF_STRIPPED)
2879 fprintf (file, "stripped ");
2880 if (use->flags & DF_REF_IN_NOTE)
2881 fprintf (file, "note ");
2882 df_chain_dump (df, DF_REF_CHAIN (use), file);
2883 fprintf (file, "\n");
2884 }
2885 }
2886 }
2887}
2888
2889
2890static struct df_problem problem_CHAIN =
2891{
2892 DF_CHAIN, /* Problem id. */
2893 DF_NONE, /* Direction. */
2894 df_chain_alloc, /* Allocate the problem specific data. */
2895 NULL, /* Free basic block info. */
2896 NULL, /* Local compute function. */
2897 NULL, /* Init the solution specific data. */
2898 NULL, /* Iterative solver. */
2899 NULL, /* Confluence operator 0. */
2900 NULL, /* Confluence operator n. */
2901 NULL, /* Transfer function. */
2902 df_chain_finalize, /* Finalize function. */
2903 df_chain_free, /* Free all of the problem information. */
2904 df_chains_dump, /* Debugging. */
2905 &problem_RD /* Dependent problem. */
2906};
2907
2908
2909/* Create a new DATAFLOW instance and add it to an existing instance
2910 of DF. The returned structure is what is used to get at the
2911 solution. */
2912
2913struct dataflow *
2914df_chain_add_problem (struct df *df, int flags)
2915{
2916 struct df_chain_problem_data *problem_data =
2917 xmalloc (sizeof (struct df_chain_problem_data));
2918 struct dataflow *dflow = df_add_problem (df, &problem_CHAIN);
2919
2920 dflow->problem_data = problem_data;
2921 problem_data->flags = flags;
2922
2923 return dflow;
2924}
2925
2926
2927/*----------------------------------------------------------------------------
2928 REGISTER INFORMATION
2929
2930 Currently this consists of only lifetime information. But the plan is
2931 to enhance it so that it produces all of the register information needed
2932 by the register allocators.
2933----------------------------------------------------------------------------*/
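
/* A minimal usage sketch, assuming the DF_RI problem has already
   been added and solved and REG is a REG rtx:

     int life = df_reg_lifetime (df, reg);  */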
2934
2935
2936struct df_ri_problem_data
2937{
2938 int *lifetime;
2939};
2940
2941
2942/* Allocate the lifetime information. */
2943
2944static void
2945df_ri_alloc (struct dataflow *dflow, bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2946{
2947 struct df_ri_problem_data *problem_data =
2948 (struct df_ri_problem_data *) dflow->problem_data;
2949
2950 if (!dflow->problem_data)
2951 {
2952 problem_data = xmalloc (sizeof (struct df_ri_problem_data));
2953 problem_data->lifetime = NULL; /* So the xrealloc below is safe. */
2954 dflow->problem_data = problem_data;
2955 }
2956
2957 problem_data->lifetime = xrealloc (problem_data->lifetime,
2958 max_reg_num () * sizeof (int));
2959 memset (problem_data->lifetime, 0, max_reg_num () * sizeof (int));
2960}
2961
2962/* Compute register info (currently just lifetimes) for basic block BB,
2963 scanning its insns in reverse while maintaining the LIVE set. */
2964
2965static void
2966df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index, bitmap live)
2967{
2968 struct df *df = dflow->df;
2969 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
2970 struct df_ri_problem_data *problem_data =
2971 (struct df_ri_problem_data *) dflow->problem_data;
2972 basic_block bb = BASIC_BLOCK (bb_index);
2973 rtx insn;
2974
2975 bitmap_copy (live, bb_info->out);
2976
2977 FOR_BB_INSNS_REVERSE (bb, insn)
2978 {
2979 unsigned int uid = INSN_UID (insn);
2980 unsigned int regno;
2981 bitmap_iterator bi;
2982 struct df_ref *def;
2983 struct df_ref *use;
2984
2985 if (! INSN_P (insn))
2986 continue;
2987
2988 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
2989 {
2990 unsigned int dregno = DF_REF_REGNO (def);
2991
2992 /* Kill this register. */
2993 bitmap_clear_bit (live, dregno);
2994 }
2995
2996 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
2997 {
2998 unsigned int uregno = DF_REF_REGNO (use);
2999
3000 /* This register is now live. */
3001 bitmap_set_bit (live, uregno);
3002 }
3003
3004 /* Increment lifetimes of all live registers. */
3005 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3006 {
3007 problem_data->lifetime[regno]++;
3008 }
3009 }
3010}
3011
3012
3013/* Compute register info (currently just lifetimes) for BLOCKS_TO_SCAN. */
3014static void
3015df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
3016 bitmap blocks_to_scan)
3017{
3018 unsigned int bb_index;
3019 bitmap_iterator bi;
3020 bitmap live;
3021
3022 live = BITMAP_ALLOC (NULL);
3023
3024 EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
3025 {
3026 df_ri_bb_compute (dflow, bb_index, live);
3027 }
3028
3029 BITMAP_FREE (live);
3030}
3031
3032
3033/* Free all storage associated with the problem. */
3034
3035static void
3036df_ri_free (struct dataflow *dflow)
3037{
3038 struct df_ri_problem_data *problem_data =
3039 (struct df_ri_problem_data *) dflow->problem_data;
3040
3041 free (problem_data->lifetime);
3042 free (dflow->problem_data);
3043 free (dflow);
3044}
3045
3046
3047/* Debugging info. */
3048
3049static void
3050df_ri_dump (struct dataflow *dflow, FILE *file)
3051{
3052 struct df_ri_problem_data *problem_data =
3053 (struct df_ri_problem_data *) dflow->problem_data;
3054 int j;
3055
3056 fprintf (file, "Register info:\n");
3057 for (j = 0; j < max_reg_num (); j++)
3058 {
3059 fprintf (file, "reg %d life %d\n", j, problem_data->lifetime[j]);
3060 }
3061}
3062
3063/* All of the information associated with every instance of the problem. */
3064
3065static struct df_problem problem_RI =
3066{
3067 DF_RI, /* Problem id. */
3068 DF_NONE, /* Direction. */
3069 df_ri_alloc, /* Allocate the problem specific data. */
3070 NULL, /* Free basic block info. */
3071 df_ri_compute, /* Local compute function. */
3072 NULL, /* Init the solution specific data. */
3073 NULL, /* Iterative solver. */
3074 NULL, /* Confluence operator 0. */
3075 NULL, /* Confluence operator n. */
3076 NULL, /* Transfer function. */
3077 NULL, /* Finalize function. */
3078 df_ri_free, /* Free all of the problem information. */
3079 df_ri_dump, /* Debugging. */
3080 &problem_UR /* Dependent problem. */
3081};
3082
3083
3084/* Create a new DATAFLOW instance and add it to an existing instance
3085 of DF. The returned structure is what is used to get at the
3086 solution. */
3087
3088struct dataflow *
3089df_ri_add_problem (struct df *df)
3090{
3091 return df_add_problem (df, &problem_RI);
3092}
3093
3094
3095/* Return total lifetime (in insns) of REG. */
3096int
3097df_reg_lifetime (struct df *df, rtx reg)
3098{
3099 struct dataflow *dflow = df->problems_by_index[DF_RI];
3100 struct df_ri_problem_data *problem_data =
3101 (struct df_ri_problem_data *) dflow->problem_data;
3102 return problem_data->lifetime[REGNO (reg)];
3103}
3104
3105