/* gcc.gnu.org Git - gcc.git - gcc/df-problems.c
   re PR rtl-optimization/24762 ([killloop-branch] code motion of
   non-invariant expressions).  */
1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 2, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to the Free
23 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 02110-1301, USA. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "function.h"
35 #include "regs.h"
36 #include "output.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
41 #include "sbitmap.h"
42 #include "bitmap.h"
43 #include "timevar.h"
44 #include "df.h"
45
/* Registers with more refs than this are handled "sparsely": the kill
   sets record the regno itself rather than enumerating every ref id.  */
#define DF_SPARSE_THRESHOLD 32

/* Scratch bitmaps of regnos used by the local-compute routines;
   allocated by df_set_seen and freed by df_unset_seen.  */
static bitmap seen_in_block = NULL;
static bitmap seen_in_insn = NULL;
50
51 \f
52 /*----------------------------------------------------------------------------
53 Public functions access functions for the dataflow problems.
54 ----------------------------------------------------------------------------*/
55
56 /* Get the instance of the problem that DFLOW is dependent on. */
57
58 struct dataflow *
59 df_get_dependent_problem (struct dataflow *dflow)
60 {
61 struct df *df = dflow->df;
62 struct df_problem *dependent_problem = dflow->problem->dependent_problem;
63
64 gcc_assert (dependent_problem);
65 return df->problems_by_index[dependent_problem->id];
66 }
67
68
69 /* Create a du or ud chain from SRC to DST and link it into SRC. */
70
71 struct df_link *
72 df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
73 {
74 struct df_link *head = DF_REF_CHAIN (src);
75 struct df_link *link = pool_alloc (dflow->block_pool);;
76
77 DF_REF_CHAIN (src) = link;
78 link->next = head;
79 link->ref = dst;
80 return link;
81 }
82
83
/* Delete a du or ud chain for REF.  If LINK is NULL, delete all
   chains for ref and check to see if the reverse chains can also be
   deleted.  If LINK is not NULL it must be a link off of ref.  In
   this case, the other end is not deleted.  */

void
df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
{
  struct df_link *chain = DF_REF_CHAIN (ref);
  if (link)
    {
      /* Link was the first element in the chain.  */
      if (chain == link)
	DF_REF_CHAIN (ref) = link->next;
      else
	{
	  /* Link is an internal element in the chain: walk to it and
	     splice it out.  */
	  struct df_link *prev = chain;
	  while (chain)
	    {
	      if (chain == link)
		{
		  prev->next = chain->next;
		  break;
		}
	      prev = chain;
	      chain = chain->next;
	    }
	}
      /* Only the link itself is returned to the pool; the ref on the
	 other end stays alive.  */
      pool_free (dflow->block_pool, link);
    }
  else
    {
      /* If chain is NULL here, it was because of a recursive call
	 when the other flavor of chains was not built.  Just run thru
	 the entire chain calling the other side and then deleting the
	 link.  */
      while (chain)
	{
	  /* Save the successor first: the recursive call frees CHAIN.  */
	  struct df_link *next = chain->next;
	  /* Delete the other side if it exists.  */
	  df_chain_unlink (dflow, chain->ref, chain);
	  chain = next;
	}
    }
}
130
131
132 /* Copy the du or ud chain starting at FROM_REF and attach it to
133 TO_REF. */
134
135 void
136 df_chain_copy (struct dataflow *dflow,
137 struct df_ref *to_ref,
138 struct df_link *from_ref)
139 {
140 while (from_ref)
141 {
142 df_chain_create (dflow, to_ref, from_ref->ref);
143 from_ref = from_ref->next;
144 }
145 }
146
147
148 /* Get the live in set for BB no matter what problem happens to be
149 defined. */
150
151 bitmap
152 df_get_live_in (struct df *df, basic_block bb)
153 {
154 gcc_assert (df->problems_by_index[DF_LR]);
155
156 if (df->problems_by_index[DF_UREC])
157 return DF_RA_LIVE_IN (df, bb);
158 else if (df->problems_by_index[DF_UR])
159 return DF_LIVE_IN (df, bb);
160 else
161 return DF_UPWARD_LIVE_IN (df, bb);
162 }
163
164
165 /* Get the live out set for BB no matter what problem happens to be
166 defined. */
167
168 bitmap
169 df_get_live_out (struct df *df, basic_block bb)
170 {
171 gcc_assert (df->problems_by_index[DF_LR]);
172
173 if (df->problems_by_index[DF_UREC])
174 return DF_RA_LIVE_OUT (df, bb);
175 else if (df->problems_by_index[DF_UR])
176 return DF_LIVE_OUT (df, bb);
177 else
178 return DF_UPWARD_LIVE_OUT (df, bb);
179 }
180
181
182 /*----------------------------------------------------------------------------
183 Utility functions.
184 ----------------------------------------------------------------------------*/
185
186 /* Generic versions to get the void* version of the block info. Only
187 used inside the problem instace vectors. */
188
189 /* Grow the bb_info array. */
190
191 void
192 df_grow_bb_info (struct dataflow *dflow)
193 {
194 unsigned int new_size = last_basic_block + 1;
195 if (dflow->block_info_size < new_size)
196 {
197 new_size += new_size / 4;
198 dflow->block_info = xrealloc (dflow->block_info,
199 new_size *sizeof (void*));
200 memset (dflow->block_info + dflow->block_info_size, 0,
201 (new_size - dflow->block_info_size) *sizeof (void *));
202 dflow->block_info_size = new_size;
203 }
204 }
205
206 /* Dump a def-use or use-def chain for REF to FILE. */
207
208 void
209 df_chain_dump (struct df *df ATTRIBUTE_UNUSED, struct df_link *link, FILE *file)
210 {
211 fprintf (file, "{ ");
212 for (; link; link = link->next)
213 {
214 fprintf (file, "%c%d(bb %d insn %d) ",
215 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
216 DF_REF_ID (link->ref),
217 DF_REF_BBNO (link->ref),
218 DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
219 }
220 fprintf (file, "}");
221 }
222
223
224 /* Print some basic block info as part of df_dump. */
225
226 void
227 df_print_bb_index (basic_block bb, FILE *file)
228 {
229 edge e;
230 edge_iterator ei;
231
232 fprintf (file, "( ");
233 FOR_EACH_EDGE (e, ei, bb->preds)
234 {
235 basic_block pred = e->src;
236 fprintf (file, "%d ", pred->index);
237 }
238 fprintf (file, ")->[%d]->( ", bb->index);
239 FOR_EACH_EDGE (e, ei, bb->succs)
240 {
241 basic_block succ = e->dest;
242 fprintf (file, "%d ", succ->index);
243 }
244 fprintf (file, ")\n");
245 }
246
247
248 /* Return the set of reference ids in CHAIN, caching the result in *BMAP. */
249
250 static inline bitmap
251 df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
252 {
253 bitmap ids = maps[regno];
254 if (!ids)
255 {
256 unsigned int i;
257 unsigned int end = start + count;;
258 ids = BITMAP_ALLOC (NULL);
259 maps[regno] = ids;
260 for (i = start; i < end; i++)
261 bitmap_set_bit (ids, i);
262 }
263 return ids;
264 }
265
266
267 /* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
268 up correctly. */
269
270 static void
271 df_set_seen (void)
272 {
273 seen_in_block = BITMAP_ALLOC (NULL);
274 seen_in_insn = BITMAP_ALLOC (NULL);
275 }
276
277
278 static void
279 df_unset_seen (void)
280 {
281 BITMAP_FREE (seen_in_block);
282 BITMAP_FREE (seen_in_insn);
283 }
284
285
286 \f
287 /*----------------------------------------------------------------------------
288 REACHING USES
289
290 Find the locations in the function where each use site for a pseudo
291 can reach backwards.
292
293 ----------------------------------------------------------------------------*/
294
struct df_ru_problem_data
{
  bitmap *use_sites;            /* Cached bitmap of use ids for each pseudo,
				   built lazily by df_ref_bitmap.  */
  unsigned int use_sites_size;  /* Size of use_sites.  */
  /* Regnos (those with > DF_SPARSE_THRESHOLD uses) whose uses are
     invalidated by a call.  */
  bitmap sparse_invalidated_by_call;
  /* Use ids (for regnos under the threshold) invalidated by a call.  */
  bitmap dense_invalidated_by_call;
};
304
305 /* Get basic block info. */
306
307 struct df_ru_bb_info *
308 df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
309 {
310 return (struct df_ru_bb_info *) dflow->block_info[index];
311 }
312
313
314 /* Set basic block info. */
315
316 static void
317 df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
318 struct df_ru_bb_info *bb_info)
319 {
320 dflow->block_info[index] = bb_info;
321 }
322
323
324 /* Free basic block info. */
325
326 static void
327 df_ru_free_bb_info (struct dataflow *dflow,
328 basic_block bb ATTRIBUTE_UNUSED,
329 void *vbb_info)
330 {
331 struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
332 if (bb_info)
333 {
334 BITMAP_FREE (bb_info->kill);
335 BITMAP_FREE (bb_info->sparse_kill);
336 BITMAP_FREE (bb_info->gen);
337 BITMAP_FREE (bb_info->in);
338 BITMAP_FREE (bb_info->out);
339 pool_free (dflow->block_pool, bb_info);
340 }
341 }
342
343
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits (in
   and out) are not touched unless the block is new.  */

static void
df_ru_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int reg_size = max_reg_num ();

  if (! dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_ru_block pool",
					   sizeof (struct df_ru_bb_info), 50);

  if (dflow->problem_data)
    {
      /* The problem has run before: drop the cached per-register
	 use-site bitmaps, grow the cache to cover any new pseudos,
	 and clear the call-invalidated sets.  */
      unsigned int i;
      struct df_ru_problem_data *problem_data =
	(struct df_ru_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->use_sites_size; i++)
	{
	  bitmap bm = problem_data->use_sites[i];
	  if (bm)
	    {
	      BITMAP_FREE (bm);
	      problem_data->use_sites[i] = NULL;
	    }
	}

      if (problem_data->use_sites_size < reg_size)
	{
	  problem_data->use_sites
	    = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
	  memset (problem_data->use_sites + problem_data->use_sites_size, 0,
		  (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
	  problem_data->use_sites_size = reg_size;
	}

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);
    }
  else
    {
      /* First run: allocate the problem data from scratch.  */
      struct df_ru_problem_data *problem_data =
	xmalloc (sizeof (struct df_ru_problem_data));
      dflow->problem_data = problem_data;

      problem_data->use_sites = xcalloc (reg_size, sizeof (bitmap));
      problem_data->use_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
    }

  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
      if (bb_info)
	{
	  /* Existing block: reset only the local sets; in and out
	     keep the prior solution as the starting point.  */
	  bitmap_clear (bb_info->kill);
	  bitmap_clear (bb_info->sparse_kill);
	  bitmap_clear (bb_info->gen);
	}
      else
	{
	  bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
	  df_ru_set_bb_info (dflow, bb_index, bb_info);
	  bb_info->kill = BITMAP_ALLOC (NULL);
	  bb_info->sparse_kill = BITMAP_ALLOC (NULL);
	  bb_info->gen = BITMAP_ALLOC (NULL);
	  bb_info->in = BITMAP_ALLOC (NULL);
	  bb_info->out = BITMAP_ALLOC (NULL);
	}
    }
}
425
426
/* Process a list of DEFs for df_ru_bb_local_compute.  Only defs whose
   DF_REF_AT_TOP flag matches TOP_FLAG are considered.  */

static void
df_ru_bb_local_compute_process_def (struct dataflow *dflow,
				    struct df_ru_bb_info *bb_info,
				    struct df_ref *def,
				    enum df_ref_flags top_flag)
{
  struct df *df = dflow->df;
  while (def)
    {
      if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
	{
	  unsigned int regno = DF_REF_REGNO (def);
	  /* The ids of all uses of regno occupy the contiguous range
	     [begin, begin + n_uses).  */
	  unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
	  unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
	  if (!bitmap_bit_p (seen_in_block, regno))
	    {
	      /* The first def for regno causes the kill info to be
		 generated and the gen information to be cleared.  */
	      if (!bitmap_bit_p (seen_in_insn, regno))
		{
		  if (n_uses > DF_SPARSE_THRESHOLD)
		    {
		      /* Too many uses to enumerate: record just the
			 regno in sparse_kill.  */
		      bitmap_set_bit (bb_info->sparse_kill, regno);
		      bitmap_clear_range (bb_info->gen, begin, n_uses);
		    }
		  else
		    {
		      /* Kill every use id of regno individually.  */
		      struct df_ru_problem_data * problem_data =
			(struct df_ru_problem_data *)dflow->problem_data;
		      bitmap uses =
			df_ref_bitmap (problem_data->use_sites, regno,
				       begin, n_uses);
		      bitmap_ior_into (bb_info->kill, uses);
		      bitmap_and_compl_into (bb_info->gen, uses);
		    }
		}
	      bitmap_set_bit (seen_in_insn, regno);
	    }
	}
      def = def->next_ref;
    }
}
471
472
473 /* Process a list of USEs for df_ru_bb_local_compute. */
474
475 static void
476 df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
477 struct df_ref *use,
478 enum df_ref_flags top_flag)
479 {
480 while (use)
481 {
482 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
483 {
484 /* Add use to set of gens in this BB unless we have seen a
485 def in a previous instruction. */
486 unsigned int regno = DF_REF_REGNO (use);
487 if (!bitmap_bit_p (seen_in_block, regno))
488 bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
489 }
490 use = use->next_ref;
491 }
492 }
493
/* Compute local reaching use (upward exposed use) info for basic
   block BB.  USE_INFO->REGS[R] caches the set of uses for register R.  */
static void
df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
  rtx insn;

  /* Set when a def for regno is seen.  */
  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

#ifdef EH_USES
  /* Variables defined in the prolog that are used by the exception
     handler.  */
  df_ru_bb_local_compute_process_use (bb_info,
				      df_get_artificial_uses (df, bb_index),
				      DF_REF_AT_TOP);
#endif
  df_ru_bb_local_compute_process_def (dflow, bb_info,
				      df_get_artificial_defs (df, bb_index),
				      DF_REF_AT_TOP);

  /* Walk the insns top to bottom; defs of earlier insns mask uses in
     later ones via seen_in_block.  */
  FOR_BB_INSNS (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);
      if (! INSN_P (insn))
	continue;

      df_ru_bb_local_compute_process_def (dflow, bb_info,
					  DF_INSN_UID_GET (df, uid)->defs, 0);

      /* The use processing must happen after the defs processing even
	 though the uses logically happen first since the defs clear
	 the gen set.  Otherwise, a use for regno occurring in the same
	 instruction as a def for regno would be cleared.  */
      df_ru_bb_local_compute_process_use (bb_info,
					  DF_INSN_UID_GET (df, uid)->uses, 0);

      /* Defs only start masking uses from the NEXT insn onward.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);
    }

  /* Process the hardware registers that are always live.  */
  df_ru_bb_local_compute_process_use (bb_info,
				      df_get_artificial_uses (df, bb_index), 0);

  df_ru_bb_local_compute_process_def (dflow, bb_info,
				      df_get_artificial_defs (df, bb_index), 0);
}
546
547
/* Compute local reaching use (upward exposed use) info for each basic
   block within BLOCKS, and build the call-invalidated knockout sets
   used across EH edges.  */
static void
df_ru_local_compute (struct dataflow *dflow,
		     bitmap all_blocks,
		     bitmap rescan_blocks ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int regno;
  struct df_ru_problem_data *problem_data =
    (struct df_ru_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  df_set_seen ();

  /* Use ids must be clustered per register before the per-block scan
     can use [begin, begin + n_refs) ranges.  */
  if (!df->use_info.refs_organized)
    df_reorganize_refs (&df->use_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      df_ru_bb_local_compute (dflow, bb_index);
    }

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
    {
      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
	bitmap_set_bit (sparse_invalidated, regno);
      else
	{
	  bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
				       reg_info->begin, reg_info->n_refs);
	  bitmap_ior_into (dense_invalidated, defs);
	}
    }

  df_unset_seen ();
}
590
591
592 /* Initialize the solution bit vectors for problem. */
593
594 static void
595 df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
596 {
597 unsigned int bb_index;
598 bitmap_iterator bi;
599
600 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
601 {
602 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
603 bitmap_copy (bb_info->in, bb_info->gen);
604 bitmap_clear (bb_info->out);
605 }
606 }
607
608
/* Confluence operator: OUT of the edge's source accumulates IN of its
   destination (this is a backward problem).  Across an EH edge, uses
   of call-invalidated registers are filtered out first.  */

static void
df_ru_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
  bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;

  if (e->flags & EDGE_EH)
    {
      struct df_ru_problem_data *problem_data =
	(struct df_ru_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;
      bitmap_iterator bi;
      unsigned int regno;
      bitmap tmp = BITMAP_ALLOC (NULL);

      /* Copy IN and strip the use ids invalidated by the call: first
	 the individually enumerated (dense) ids, then the whole use
	 ranges of the sparsely tracked registers.  */
      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);

      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
	{
	  bitmap_clear_range (tmp,
			      DF_REG_USE_GET (df, regno)->begin,
			      DF_REG_USE_GET (df, regno)->n_refs);
	}
      bitmap_ior_into (op1, tmp);
      BITMAP_FREE (tmp);
    }
  else
    bitmap_ior_into (op1, op2);
}
643
644
645 /* Transfer function. */
646
647 static bool
648 df_ru_transfer_function (struct dataflow *dflow, int bb_index)
649 {
650 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
651 unsigned int regno;
652 bitmap_iterator bi;
653 bitmap in = bb_info->in;
654 bitmap out = bb_info->out;
655 bitmap gen = bb_info->gen;
656 bitmap kill = bb_info->kill;
657 bitmap sparse_kill = bb_info->sparse_kill;
658
659 if (bitmap_empty_p (sparse_kill))
660 return bitmap_ior_and_compl (in, gen, out, kill);
661 else
662 {
663 struct df *df = dflow->df;
664 bool changed = false;
665 bitmap tmp = BITMAP_ALLOC (NULL);
666 bitmap_copy (tmp, in);
667 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
668 {
669 bitmap_clear_range (tmp,
670 DF_REG_USE_GET (df, regno)->begin,
671 DF_REG_USE_GET (df, regno)->n_refs);
672 }
673 bitmap_and_compl_into (tmp, kill);
674 bitmap_ior_into (tmp, gen);
675 changed = !bitmap_equal_p (tmp, in);
676 if (changed)
677 {
678 BITMAP_FREE (out);
679 bb_info->in = tmp;
680 }
681 else
682 BITMAP_FREE (tmp);
683 return changed;
684 }
685 }
686
687
/* Free all storage associated with the problem: per-block bitmaps,
   the block pool, the cached use-site bitmaps, the problem data, and
   finally the dataflow instance itself.  */

static void
df_ru_free (struct dataflow *dflow)
{
  unsigned int i;
  struct df_ru_problem_data *problem_data =
    (struct df_ru_problem_data *) dflow->problem_data;

  if (problem_data)
    {
      for (i = 0; i < dflow->block_info_size; i++)
	{
	  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
	  if (bb_info)
	    {
	      BITMAP_FREE (bb_info->kill);
	      BITMAP_FREE (bb_info->sparse_kill);
	      BITMAP_FREE (bb_info->gen);
	      BITMAP_FREE (bb_info->in);
	      BITMAP_FREE (bb_info->out);
	    }
	}

      /* Freeing the pool releases all the bb_info records at once.  */
      free_alloc_pool (dflow->block_pool);

      for (i = 0; i < problem_data->use_sites_size; i++)
	{
	  bitmap bm = problem_data->use_sites[i];
	  if (bm)
	    BITMAP_FREE (bm);
	}

      free (problem_data->use_sites);
      BITMAP_FREE (problem_data->sparse_invalidated_by_call);
      BITMAP_FREE (problem_data->dense_invalidated_by_call);

      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
    }
  free (dflow);
}
731
732
/* Dump the reaching-uses solution to FILE: the call-invalidated sets,
   the use-id range of every referenced register, and each block's
   in/gen/kill/out bitmaps.  */

static void
df_ru_dump (struct dataflow *dflow, FILE *file)
{
  basic_block bb;
  struct df *df = dflow->df;
  struct df_ru_problem_data *problem_data =
    (struct df_ru_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();
  unsigned int regno;

  fprintf (file, "Reaching uses:\n");

  fprintf (file, "  sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, "  dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  /* Print regno[begin,n_refs] for every register that has uses.  */
  for (regno = 0; regno < m; regno++)
    if (DF_REG_USE_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
	       DF_REG_USE_GET (df, regno)->begin,
	       DF_REG_USE_GET (df, regno)->n_refs);
  fprintf (file, "\n");

  FOR_ALL_BB (bb)
    {
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      /* Blocks never allocated (not rescanned) have no bitmaps.  */
      if (! bb_info->in)
	continue;

      fprintf (file, "  in  \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t");
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t");
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out \t");
      dump_bitmap (file, bb_info->out);
    }
}
777
/* All of the information associated with every instance of the
   reaching-uses problem.  RU is a backward problem with no dependent
   problem.  */

static struct df_problem problem_RU =
{
  DF_RU,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_ru_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_ru_free_bb_info,         /* Free basic block info.  */
  df_ru_local_compute,        /* Local compute function.  */
  df_ru_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_ru_confluence_n,         /* Confluence operator n.  */
  df_ru_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_ru_free,                 /* Free all of the problem information.  */
  df_ru_dump,                 /* Debugging.  */
  NULL                        /* Dependent problem.  */
};
798
799
800
/* Create a new DATAFLOW instance for the reaching-uses problem and
   add it to an existing instance of DF.  The returned structure is
   what is used to get at the solution.  */

struct dataflow *
df_ru_add_problem (struct df *df)
{
  return df_add_problem (df, &problem_RU);
}
810
811 \f
812 /*----------------------------------------------------------------------------
813 REACHING DEFINITIONS
814
815 Find the locations in the function where each definition site for a
816 pseudo reaches.
817 ----------------------------------------------------------------------------*/
818
struct df_rd_problem_data
{
  bitmap *def_sites;            /* Cached bitmap of def ids for each pseudo,
				   built lazily by df_ref_bitmap.  */
  unsigned int def_sites_size;  /* Size of def_sites.  */
  /* Regnos (those with > DF_SPARSE_THRESHOLD defs) whose defs are
     invalidated by a call.  */
  bitmap sparse_invalidated_by_call;
  /* Def ids (for regnos under the threshold) invalidated by a call.  */
  bitmap dense_invalidated_by_call;
};
828
829 /* Get basic block info. */
830
831 struct df_rd_bb_info *
832 df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
833 {
834 return (struct df_rd_bb_info *) dflow->block_info[index];
835 }
836
837
838 /* Set basic block info. */
839
840 static void
841 df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
842 struct df_rd_bb_info *bb_info)
843 {
844 dflow->block_info[index] = bb_info;
845 }
846
847
848 /* Free basic block info. */
849
850 static void
851 df_rd_free_bb_info (struct dataflow *dflow,
852 basic_block bb ATTRIBUTE_UNUSED,
853 void *vbb_info)
854 {
855 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
856 if (bb_info)
857 {
858 BITMAP_FREE (bb_info->kill);
859 BITMAP_FREE (bb_info->sparse_kill);
860 BITMAP_FREE (bb_info->gen);
861 BITMAP_FREE (bb_info->in);
862 BITMAP_FREE (bb_info->out);
863 pool_free (dflow->block_pool, bb_info);
864 }
865 }
866
867
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits (in
   and out) are not touched unless the block is new.  */

static void
df_rd_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int reg_size = max_reg_num ();

  if (! dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_rd_block pool",
					   sizeof (struct df_rd_bb_info), 50);

  if (dflow->problem_data)
    {
      /* The problem has run before: drop the cached per-register
	 def-site bitmaps, grow the cache to cover any new pseudos,
	 and clear the call-invalidated sets.  */
      unsigned int i;
      struct df_rd_problem_data *problem_data =
	(struct df_rd_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->def_sites_size; i++)
	{
	  bitmap bm = problem_data->def_sites[i];
	  if (bm)
	    {
	      BITMAP_FREE (bm);
	      problem_data->def_sites[i] = NULL;
	    }
	}

      if (problem_data->def_sites_size < reg_size)
	{
	  problem_data->def_sites
	    = xrealloc (problem_data->def_sites, reg_size * sizeof (bitmap));
	  memset (problem_data->def_sites + problem_data->def_sites_size, 0,
		  (reg_size - problem_data->def_sites_size) * sizeof (bitmap));
	  problem_data->def_sites_size = reg_size;
	}

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);
    }
  else
    {
      /* First run: allocate the problem data from scratch.  */
      struct df_rd_problem_data *problem_data =
	xmalloc (sizeof (struct df_rd_problem_data));
      dflow->problem_data = problem_data;

      problem_data->def_sites = xcalloc (reg_size, sizeof (bitmap));
      problem_data->def_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
    }

  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
      if (bb_info)
	{
	  /* Existing block: reset only the local sets; in and out
	     keep the prior solution as the starting point.  */
	  bitmap_clear (bb_info->kill);
	  bitmap_clear (bb_info->sparse_kill);
	  bitmap_clear (bb_info->gen);
	}
      else
	{
	  bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
	  df_rd_set_bb_info (dflow, bb_index, bb_info);
	  bb_info->kill = BITMAP_ALLOC (NULL);
	  bb_info->sparse_kill = BITMAP_ALLOC (NULL);
	  bb_info->gen = BITMAP_ALLOC (NULL);
	  bb_info->in = BITMAP_ALLOC (NULL);
	  bb_info->out = BITMAP_ALLOC (NULL);
	}
    }
}
949
950
/* Process a list of DEFs for df_rd_bb_local_compute.  Only defs whose
   DF_REF_AT_TOP flag matches TOP_FLAG are considered.  */

static void
df_rd_bb_local_compute_process_def (struct dataflow *dflow,
				    struct df_rd_bb_info *bb_info,
				    struct df_ref *def,
				    enum df_ref_flags top_flag)
{
  struct df *df = dflow->df;
  while (def)
    {
      if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
	{
	  unsigned int regno = DF_REF_REGNO (def);
	  /* The ids of all defs of regno occupy the contiguous range
	     [begin, begin + n_defs).  */
	  unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
	  unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;

	  /* Only the last def(s) for a regno in the block has any
	     effect.  */
	  if (!bitmap_bit_p (seen_in_block, regno))
	    {
	      /* The first def for regno in insn gets to knock out the
		 defs from other instructions.  */
	      if (!bitmap_bit_p (seen_in_insn, regno))
		{
		  if (n_defs > DF_SPARSE_THRESHOLD)
		    {
		      /* Too many defs to enumerate: record just the
			 regno in sparse_kill.  */
		      bitmap_set_bit (bb_info->sparse_kill, regno);
		      bitmap_clear_range (bb_info->gen, begin, n_defs);
		    }
		  else
		    {
		      /* Kill every def id of regno individually.  */
		      struct df_rd_problem_data * problem_data =
			(struct df_rd_problem_data *)dflow->problem_data;
		      bitmap defs =
			df_ref_bitmap (problem_data->def_sites, regno,
				       begin, n_defs);
		      bitmap_ior_into (bb_info->kill, defs);
		      bitmap_and_compl_into (bb_info->gen, defs);
		    }
		}

	      bitmap_set_bit (seen_in_insn, regno);
	      /* All defs for regno in the instruction may be put into
		 the gen set, but clobbers do not generate a reaching
		 definition.  */
	      if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
		bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
	    }
	}
      def = def->next_ref;
    }
}
1003
/* Compute local reaching def info for basic block BB.  The insns are
   walked in reverse so that the LAST def of a register in the block
   is the one that survives into the gen set.  */

static void
df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
  rtx insn;

  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  df_rd_bb_local_compute_process_def (dflow, bb_info,
				      df_get_artificial_defs (df, bb_index), 0);

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))
	continue;

      df_rd_bb_local_compute_process_def (dflow, bb_info,
					  DF_INSN_UID_GET (df, uid)->defs, 0);

      /* This complex dance with the two bitmaps is required because
	 instructions can assign twice to the same pseudo.  This
	 generally happens with calls that will have one def for the
	 result and another def for the clobber.  If only one vector
	 is used and the clobber goes first, the result will be
	 lost.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);
    }

  /* Process the artificial defs at the top of the block last since we
     are going backwards through the block and these are logically at
     the start.  */
  df_rd_bb_local_compute_process_def (dflow, bb_info,
				      df_get_artificial_defs (df, bb_index),
				      DF_REF_AT_TOP);
}
1047
1048
/* Compute local reaching def info for each basic block within BLOCKS,
   and build the call-invalidated knockout sets used across EH edges.  */

static void
df_rd_local_compute (struct dataflow *dflow,
		     bitmap all_blocks,
		     bitmap rescan_blocks ATTRIBUTE_UNUSED)
{
  struct df *df = dflow->df;
  unsigned int bb_index;
  bitmap_iterator bi;
  unsigned int regno;
  struct df_rd_problem_data *problem_data =
    (struct df_rd_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  df_set_seen ();

  /* Def ids must be clustered per register before the per-block scan
     can use [begin, begin + n_refs) ranges.  */
  if (!df->def_info.refs_organized)
    df_reorganize_refs (&df->def_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      df_rd_bb_local_compute (dflow, bb_index);
    }

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
    {
      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
	{
	  bitmap_set_bit (sparse_invalidated, regno);
	}
      else
	{
	  bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
				       reg_info->begin, reg_info->n_refs);
	  bitmap_ior_into (dense_invalidated, defs);
	}
    }
  df_unset_seen ();
}
1092
1093
1094 /* Initialize the solution bit vectors for problem. */
1095
1096 static void
1097 df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
1098 {
1099 unsigned int bb_index;
1100 bitmap_iterator bi;
1101
1102 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1103 {
1104 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1105
1106 bitmap_copy (bb_info->out, bb_info->gen);
1107 bitmap_clear (bb_info->in);
1108 }
1109 }
1110
/* In of target gets or of out of source.  */

static void
df_rd_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
  bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;

  if (e->flags & EDGE_EH)
    {
      /* Defs of call-invalidated registers do not survive an EH edge:
         knock them out of a copy of the source's OUT set before
         merging into the destination's IN.  */
      struct df_rd_problem_data *problem_data =
        (struct df_rd_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;
      bitmap_iterator bi;
      unsigned int regno;
      bitmap tmp = BITMAP_ALLOC (NULL);

      /* Remove the individually tracked def ids.  */
      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);

      /* For sparsely tracked registers, clear the whole contiguous
         range of def ids belonging to each register.  */
      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_DEF_GET (df, regno)->begin,
                              DF_REG_DEF_GET (df, regno)->n_refs);
        }
      bitmap_ior_into (op1, tmp);
      BITMAP_FREE (tmp);
    }
  else
    bitmap_ior_into (op1, op2);
}
1145
1146
/* Transfer function.  Returns true if the block's OUT set changed.  */

static bool
df_rd_transfer_function (struct dataflow *dflow, int bb_index)
{
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
  unsigned int regno;
  bitmap_iterator bi;
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  if (bitmap_empty_p (sparse_kill))
    /* Common case: out = gen | (in - kill); the call reports whether
       OUT changed.  */
    return bitmap_ior_and_compl (out, gen, in, kill);
  else
    {
      struct df *df = dflow->df;
      bool changed = false;
      bitmap tmp = BITMAP_ALLOC (NULL);
      bitmap_copy (tmp, in);
      /* A register in SPARSE_KILL kills its entire contiguous range of
         def ids, which is cheaper than having every def enumerated in
         KILL.  */
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
        {
          bitmap_clear_range (tmp,
                              DF_REG_DEF_GET (df, regno)->begin,
                              DF_REG_DEF_GET (df, regno)->n_refs);
        }
      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, out);
      if (changed)
        {
          /* Install TMP as the new OUT set rather than copying it.  */
          BITMAP_FREE (out);
          bb_info->out = tmp;
        }
      else
        BITMAP_FREE (tmp);
      return changed;
    }
}
1188
1189
1190 /* Free all storage associated with the problem. */
1191
1192 static void
1193 df_rd_free (struct dataflow *dflow)
1194 {
1195 unsigned int i;
1196 struct df_rd_problem_data *problem_data =
1197 (struct df_rd_problem_data *) dflow->problem_data;
1198
1199 if (problem_data)
1200 {
1201 for (i = 0; i < dflow->block_info_size; i++)
1202 {
1203 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
1204 if (bb_info)
1205 {
1206 BITMAP_FREE (bb_info->kill);
1207 BITMAP_FREE (bb_info->sparse_kill);
1208 BITMAP_FREE (bb_info->gen);
1209 BITMAP_FREE (bb_info->in);
1210 BITMAP_FREE (bb_info->out);
1211 }
1212 }
1213
1214 free_alloc_pool (dflow->block_pool);
1215
1216 for (i = 0; i < problem_data->def_sites_size; i++)
1217 {
1218 bitmap bm = problem_data->def_sites[i];
1219 if (bm)
1220 BITMAP_FREE (bm);
1221 }
1222
1223 free (problem_data->def_sites);
1224 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
1225 BITMAP_FREE (problem_data->dense_invalidated_by_call);
1226
1227 dflow->block_info_size = 0;
1228 free (dflow->block_info);
1229 free (dflow->problem_data);
1230 }
1231 free (dflow);
1232 }
1233
1234
/* Debugging info.  Dump the RD problem data and per-block solution
   to FILE.  */

static void
df_rd_dump (struct dataflow *dflow, FILE *file)
{
  struct df *df = dflow->df;
  basic_block bb;
  struct df_rd_problem_data *problem_data =
    (struct df_rd_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();
  unsigned int regno;

  fprintf (file, "Reaching defs:\n\n");

  /* The call-invalidated knockout sets built by df_rd_local_compute.  */
  fprintf (file, " sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, " dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  /* Print regno[first def id, number of defs] for every register with
     at least one def.  */
  for (regno = 0; regno < m; regno++)
    if (DF_REG_DEF_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
               DF_REG_DEF_GET (df, regno)->begin,
               DF_REG_DEF_GET (df, regno)->n_refs);
  fprintf (file, "\n");

  FOR_ALL_BB (bb)
    {
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      /* Blocks that were never scanned have no bitmaps to print.  */
      if (! bb_info->in)
        continue;

      fprintf (file, " in\t(%d)\n", (int) bitmap_count_bits (bb_info->in));
      dump_bitmap (file, bb_info->in);
      fprintf (file, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
      dump_bitmap (file, bb_info->gen);
      fprintf (file, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
      dump_bitmap (file, bb_info->kill);
      fprintf (file, " out\t(%d)\n", (int) bitmap_count_bits (bb_info->out));
      dump_bitmap (file, bb_info->out);
    }
}
1279
/* All of the information associated with every instance of the problem.
   RD is a forward problem whose bit vectors are indexed by def ids.  */

static struct df_problem problem_RD =
{
  DF_RD,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_rd_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_rd_free_bb_info,         /* Free basic block info.  */
  df_rd_local_compute,        /* Local compute function.  */
  df_rd_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_rd_confluence_n,         /* Confluence operator n.  */
  df_rd_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_rd_free,                 /* Free all of the problem information.  */
  df_rd_dump,                 /* Debugging.  */
  NULL                        /* Dependent problem.  */
};
1300
1301
1302
1303 /* Create a new DATAFLOW instance and add it to an existing instance
1304 of DF. The returned structure is what is used to get at the
1305 solution. */
1306
1307 struct dataflow *
1308 df_rd_add_problem (struct df *df)
1309 {
1310 return df_add_problem (df, &problem_RD);
1311 }
1312
1313
1314 \f
1315 /*----------------------------------------------------------------------------
1316 LIVE REGISTERS
1317
1318 Find the locations in the function where any use of a pseudo can reach
1319 in the backwards direction.
1320 ----------------------------------------------------------------------------*/
1321
1322 /* Get basic block info. */
1323
1324 struct df_lr_bb_info *
1325 df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
1326 {
1327 return (struct df_lr_bb_info *) dflow->block_info[index];
1328 }
1329
1330
/* Set basic block info.  Record BB_INFO as the LR-problem data for
   block INDEX.  */

static void
df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_lr_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}
1339
1340
1341 /* Free basic block info. */
1342
1343 static void
1344 df_lr_free_bb_info (struct dataflow *dflow,
1345 basic_block bb ATTRIBUTE_UNUSED,
1346 void *vbb_info)
1347 {
1348 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
1349 if (bb_info)
1350 {
1351 BITMAP_FREE (bb_info->use);
1352 BITMAP_FREE (bb_info->def);
1353 BITMAP_FREE (bb_info->in);
1354 BITMAP_FREE (bb_info->out);
1355 pool_free (dflow->block_pool, bb_info);
1356 }
1357 }
1358
1359
1360 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1361 not touched unless the block is new. */
1362
1363 static void
1364 df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1365 {
1366 unsigned int bb_index;
1367 bitmap_iterator bi;
1368
1369 if (! dflow->block_pool)
1370 dflow->block_pool = create_alloc_pool ("df_lr_block pool",
1371 sizeof (struct df_lr_bb_info), 50);
1372
1373 df_grow_bb_info (dflow);
1374
1375 /* Because of the clustering of all def sites for the same pseudo,
1376 we have to process all of the blocks before doing the
1377 analysis. */
1378
1379 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1380 {
1381 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1382 if (bb_info)
1383 {
1384 bitmap_clear (bb_info->def);
1385 bitmap_clear (bb_info->use);
1386 }
1387 else
1388 {
1389 bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
1390 df_lr_set_bb_info (dflow, bb_index, bb_info);
1391 bb_info->use = BITMAP_ALLOC (NULL);
1392 bb_info->def = BITMAP_ALLOC (NULL);
1393 bb_info->in = BITMAP_ALLOC (NULL);
1394 bb_info->out = BITMAP_ALLOC (NULL);
1395 }
1396 }
1397 }
1398
1399
/* Compute local live register info (the USE and DEF sets) for basic
   block BB.  The block is walked backwards, so a def cancels any use
   seen later in the block (making that use not upward exposed).  */

static void
df_lr_bb_local_compute (struct dataflow *dflow,
                        struct df *df, unsigned int bb_index)
{
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
  rtx insn;
  struct df_ref *def;
  struct df_ref *use;

  /* Artificial defs at the bottom of the block come first in the
     backward walk.  Process the registers set in an exception
     handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
      {
        unsigned int dregno = DF_REF_REGNO (def);
        /* A def makes the register locally defined and cancels any
           use seen so far in the walk.  */
        bitmap_set_bit (bb_info->def, dregno);
        bitmap_clear_bit (bb_info->use, dregno);
      }

  /* Process the hardware registers that are always live.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))
        continue;

      if (CALL_P (insn))
        {
          for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
            {
              unsigned int dregno = DF_REF_REGNO (def);

              /* For a sibling call, a hard register that is used by
                 the exit block but is not mentioned in the function's
                 return rtx is NOT treated as defined here, so it stays
                 live across the sibcall.  All other defs are recorded
                 normally.  */
              if (dregno >= FIRST_PSEUDO_REGISTER
                  || !(SIBLING_CALL_P (insn)
                       && bitmap_bit_p (df->exit_block_uses, dregno)
                       && !refers_to_regno_p (dregno, dregno+1,
                                              current_function_return_rtx,
                                              (rtx *)0)))
                {
                  /* Add def to set of defs in this BB.  */
                  bitmap_set_bit (bb_info->def, dregno);
                  bitmap_clear_bit (bb_info->use, dregno);
                }
            }
        }
      else
        {
          for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
            {
              unsigned int dregno = DF_REF_REGNO (def);

              if (DF_INSN_CONTAINS_ASM (df, insn)
                  && dregno < FIRST_PSEUDO_REGISTER)
                {
                  /* Side effect: record every hard register that an
                     asm statement clobbers, spanning the full width of
                     the access in DREGNO's mode.  */
                  unsigned int i;
                  unsigned int end =
                    dregno + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
                  for (i = dregno; i <= end; ++i)
                    regs_asm_clobbered[i] = 1;
                }
              /* Add def to set of defs in this BB.  */
              bitmap_set_bit (bb_info->def, dregno);
              bitmap_clear_bit (bb_info->use, dregno);
            }
        }

      for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
        /* Add use to set of uses in this BB.  */
        bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
    }

  /* Artificial defs at the top of the block come last in the backward
     walk.  Process the registers set in an exception handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
      {
        unsigned int dregno = DF_REF_REGNO (def);
        bitmap_set_bit (bb_info->def, dregno);
        bitmap_clear_bit (bb_info->use, dregno);
      }

#ifdef EH_USES
  /* Process the uses that are live into an exception handler.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
#endif
}
1496
/* Compute local live register info for each basic block within BLOCKS.  */

static void
df_lr_local_compute (struct dataflow *dflow,
                     bitmap all_blocks,
                     bitmap rescan_blocks)
{
  struct df *df = dflow->df;
  unsigned int bb_index;
  bitmap_iterator bi;

  /* When every block is being rescanned, the set of asm-clobbered
     hard registers is rebuilt from scratch by the per-block scan.  */
  if (bitmap_equal_p (all_blocks, rescan_blocks))
    memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));

  bitmap_clear (df->hardware_regs_used);

  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);

  /* Before reload, there are a few registers that must be forced
     live everywhere -- which might not already be the case for
     blocks within infinite loops.  */
  if (! reload_completed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
#endif

      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
    }

  if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
    {
      /* The exit block is special for this problem and its bits are
         computed from thin air.  */
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
      bitmap_copy (bb_info->use, df->exit_block_uses);
    }

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
    {
      if (bb_index == EXIT_BLOCK)
        continue;
      df_lr_bb_local_compute (dflow, df, bb_index);
    }
}
1556
1557
1558 /* Initialize the solution vectors. */
1559
1560 static void
1561 df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1562 {
1563 unsigned int bb_index;
1564 bitmap_iterator bi;
1565
1566 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1567 {
1568 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1569 bitmap_copy (bb_info->in, bb_info->use);
1570 bitmap_clear (bb_info->out);
1571 }
1572 }
1573
1574
/* Confluence function that processes infinite loops.  This might be a
   noreturn function that throws.  And even if it isn't, getting the
   unwind info right helps debugging.  */
static void
df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;

  bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
  /* Seed OUT with the always-live hard registers; the exit block's
     OUT is left untouched.  Note this is a copy, not an ior — any
     previous contents of OUT are discarded.  */
  if (bb != EXIT_BLOCK_PTR)
    bitmap_copy (op1, df->hardware_regs_used);
}
1587
1588
/* Confluence function that ignores fake edges.  */
/* NOTE(review): despite the comment above, EDGE_FAKE is never tested
   in this function (compare df_ur_confluence_n) — confirm whether the
   comment or the code is stale.  */
static void
df_lr_confluence_n (struct dataflow *dflow, edge e)
{
  bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
  bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;

  /* Call-clobbered registers die across exception and call edges.  */
  /* ??? Abnormal call edges ignored for the moment, as this gets
     confused by sibling call edges, which crashes reg-stack.  */
  if (e->flags & EDGE_EH)
    bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
  else
    bitmap_ior_into (op1, op2);

  /* The always-live hard registers stay live across every edge.  */
  bitmap_ior_into (op1, dflow->df->hardware_regs_used);
}
1606
1607
1608 /* Transfer function. */
1609 static bool
1610 df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1611 {
1612 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1613 bitmap in = bb_info->in;
1614 bitmap out = bb_info->out;
1615 bitmap use = bb_info->use;
1616 bitmap def = bb_info->def;
1617
1618 return bitmap_ior_and_compl (in, use, out, def);
1619 }
1620
1621
1622 /* Free all storage associated with the problem. */
1623
1624 static void
1625 df_lr_free (struct dataflow *dflow)
1626 {
1627 if (dflow->block_info)
1628 {
1629 unsigned int i;
1630 for (i = 0; i < dflow->block_info_size; i++)
1631 {
1632 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
1633 if (bb_info)
1634 {
1635 BITMAP_FREE (bb_info->use);
1636 BITMAP_FREE (bb_info->def);
1637 BITMAP_FREE (bb_info->in);
1638 BITMAP_FREE (bb_info->out);
1639 }
1640 }
1641 free_alloc_pool (dflow->block_pool);
1642
1643 dflow->block_info_size = 0;
1644 free (dflow->block_info);
1645 }
1646 free (dflow);
1647 }
1648
1649
1650 /* Debugging info. */
1651
1652 static void
1653 df_lr_dump (struct dataflow *dflow, FILE *file)
1654 {
1655 basic_block bb;
1656
1657 fprintf (file, "Live Registers:\n");
1658 FOR_ALL_BB (bb)
1659 {
1660 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
1661 df_print_bb_index (bb, file);
1662
1663 if (!bb_info->in)
1664 continue;
1665
1666 fprintf (file, " in \t");
1667 dump_bitmap (file, bb_info->in);
1668 fprintf (file, " use \t");
1669 dump_bitmap (file, bb_info->use);
1670 fprintf (file, " def \t");
1671 dump_bitmap (file, bb_info->def);
1672 fprintf (file, " out \t");
1673 dump_bitmap (file, bb_info->out);
1674 }
1675 }
1676
/* All of the information associated with every instance of the problem.
   LR is a backward problem whose bit vectors are indexed by register
   number.  */

static struct df_problem problem_LR =
{
  DF_LR,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_lr_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_lr_free_bb_info,         /* Free basic block info.  */
  df_lr_local_compute,        /* Local compute function.  */
  df_lr_init,                 /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  df_lr_confluence_0,         /* Confluence operator 0.  */
  df_lr_confluence_n,         /* Confluence operator n.  */
  df_lr_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_lr_free,                 /* Free all of the problem information.  */
  df_lr_dump,                 /* Debugging.  */
  NULL                        /* Dependent problem.  */
};
1697
1698
1699 /* Create a new DATAFLOW instance and add it to an existing instance
1700 of DF. The returned structure is what is used to get at the
1701 solution. */
1702
1703 struct dataflow *
1704 df_lr_add_problem (struct df *df)
1705 {
1706 return df_add_problem (df, &problem_LR);
1707 }
1708
1709
1710 \f
1711 /*----------------------------------------------------------------------------
1712 UNINITIALIZED REGISTERS
1713
   Find the set of uses for registers that are reachable from the entry
   block without passing through a definition.
1716 ----------------------------------------------------------------------------*/
1717
1718 /* Get basic block info. */
1719
1720 struct df_ur_bb_info *
1721 df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1722 {
1723 return (struct df_ur_bb_info *) dflow->block_info[index];
1724 }
1725
1726
/* Set basic block info.  Record BB_INFO as the UR-problem data for
   block INDEX.  */

static void
df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_ur_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}
1735
1736
1737 /* Free basic block info. */
1738
1739 static void
1740 df_ur_free_bb_info (struct dataflow *dflow,
1741 basic_block bb ATTRIBUTE_UNUSED,
1742 void *vbb_info)
1743 {
1744 struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1745 if (bb_info)
1746 {
1747 BITMAP_FREE (bb_info->gen);
1748 BITMAP_FREE (bb_info->kill);
1749 BITMAP_FREE (bb_info->in);
1750 BITMAP_FREE (bb_info->out);
1751 pool_free (dflow->block_pool, bb_info);
1752 }
1753 }
1754
1755
1756 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1757 not touched unless the block is new. */
1758
1759 static void
1760 df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1761 {
1762 unsigned int bb_index;
1763 bitmap_iterator bi;
1764
1765 if (! dflow->block_pool)
1766 dflow->block_pool = create_alloc_pool ("df_ur_block pool",
1767 sizeof (struct df_ur_bb_info), 100);
1768
1769 df_grow_bb_info (dflow);
1770
1771 /* Because of the clustering of all def sites for the same pseudo,
1772 we have to process all of the blocks before doing the
1773 analysis. */
1774
1775 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1776 {
1777 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1778 if (bb_info)
1779 {
1780 bitmap_clear (bb_info->kill);
1781 bitmap_clear (bb_info->gen);
1782 }
1783 else
1784 {
1785 bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
1786 df_ur_set_bb_info (dflow, bb_index, bb_info);
1787 bb_info->kill = BITMAP_ALLOC (NULL);
1788 bb_info->gen = BITMAP_ALLOC (NULL);
1789 bb_info->in = BITMAP_ALLOC (NULL);
1790 bb_info->out = BITMAP_ALLOC (NULL);
1791 }
1792 }
1793 }
1794
1795
/* Compute local uninitialized register info for basic block BB.  */

static void
df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
  rtx insn;
  struct df_ref *def;

  /* SEEN_IN_BLOCK holds registers whose final gen/kill disposition is
     already decided (only the last def in the backward walk counts);
     SEEN_IN_INSN collects the defs of the current insn.  */
  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  /* Artificial defs at the bottom of the block come first in the
     backward walk.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
      {
        unsigned int regno = DF_REF_REGNO (def);
        if (!bitmap_bit_p (seen_in_block, regno))
          {
            bitmap_set_bit (seen_in_block, regno);
            bitmap_set_bit (bb_info->gen, regno);
          }
      }

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);
      if (!INSN_P (insn))
        continue;

      for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
        {
          unsigned int regno = DF_REF_REGNO (def);
          /* Only the last def counts.  */
          if (!bitmap_bit_p (seen_in_block, regno))
            {
              bitmap_set_bit (seen_in_insn, regno);

              /* A clobber kills the register without generating a
                 value; a real def generates one.  */
              if (DF_REF_FLAGS (def) & DF_REF_CLOBBER)
                bitmap_set_bit (bb_info->kill, regno);
              else
                bitmap_set_bit (bb_info->gen, regno);
            }
        }
      /* Defs within a single insn do not shadow each other; commit
         them to SEEN_IN_BLOCK only after the whole insn is done.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);
    }

  /* Artificial defs at the top of the block come last in the backward
     walk.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
      {
        unsigned int regno = DF_REF_REGNO (def);
        if (!bitmap_bit_p (seen_in_block, regno))
          {
            bitmap_set_bit (seen_in_block, regno);
            bitmap_set_bit (bb_info->gen, regno);
          }
      }
}
1856
1857
1858 /* Compute local uninitialized register info. */
1859
1860 static void
1861 df_ur_local_compute (struct dataflow *dflow,
1862 bitmap all_blocks ATTRIBUTE_UNUSED,
1863 bitmap rescan_blocks)
1864 {
1865 unsigned int bb_index;
1866 bitmap_iterator bi;
1867
1868 df_set_seen ();
1869
1870 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1871 {
1872 df_ur_bb_local_compute (dflow, bb_index);
1873 }
1874
1875 df_unset_seen ();
1876 }
1877
1878
1879 /* Initialize the solution vectors. */
1880
1881 static void
1882 df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1883 {
1884 unsigned int bb_index;
1885 bitmap_iterator bi;
1886
1887 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1888 {
1889 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1890
1891 bitmap_copy (bb_info->out, bb_info->gen);
1892 bitmap_clear (bb_info->in);
1893 }
1894 }
1895
1896
/* Trim the UR solution against the LR (liveness) solution so that a
   register is only considered defined at points where it is also
   live, and knock call-killed hard registers out of ur_out.  */

static void
df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
{
  struct df *df = dflow->df;
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);

      /* No register may reach a location where it is not used.  Thus
         we trim the rr result to the places where it is used.  */
      bitmap_and_into (bb_info->in, bb_lr_info->in);
      bitmap_and_into (bb_info->out, bb_lr_info->out);

#if 1
      /* Hard registers may still stick in the ur_out set, but not
         be in the ur_in set, if their only mention was in a call
         in this block.  This is because a call kills in the lr
         problem but does not kill in the ur problem.  To clean
         this up, we execute the transfer function on the lr_in
         set and then use that to knock bits out of ur_out.  */
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
                            bb_info->kill);
      bitmap_and_into (bb_info->out, tmp);
#endif
    }

  BITMAP_FREE (tmp);
}
1934
1935
1936 /* Confluence function that ignores fake edges. */
1937
1938 static void
1939 df_ur_confluence_n (struct dataflow *dflow, edge e)
1940 {
1941 bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
1942 bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
1943
1944 if (e->flags & EDGE_FAKE)
1945 return;
1946
1947 bitmap_ior_into (op1, op2);
1948 }
1949
1950
1951 /* Transfer function. */
1952
1953 static bool
1954 df_ur_transfer_function (struct dataflow *dflow, int bb_index)
1955 {
1956 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1957 bitmap in = bb_info->in;
1958 bitmap out = bb_info->out;
1959 bitmap gen = bb_info->gen;
1960 bitmap kill = bb_info->kill;
1961
1962 return bitmap_ior_and_compl (out, gen, in, kill);
1963 }
1964
1965
1966 /* Free all storage associated with the problem. */
1967
1968 static void
1969 df_ur_free (struct dataflow *dflow)
1970 {
1971 if (dflow->block_info)
1972 {
1973 unsigned int i;
1974
1975 for (i = 0; i < dflow->block_info_size; i++)
1976 {
1977 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
1978 if (bb_info)
1979 {
1980 BITMAP_FREE (bb_info->gen);
1981 BITMAP_FREE (bb_info->kill);
1982 BITMAP_FREE (bb_info->in);
1983 BITMAP_FREE (bb_info->out);
1984 }
1985 }
1986
1987 free_alloc_pool (dflow->block_pool);
1988 dflow->block_info_size = 0;
1989 free (dflow->block_info);
1990 }
1991 free (dflow);
1992 }
1993
1994
1995 /* Debugging info. */
1996
1997 static void
1998 df_ur_dump (struct dataflow *dflow, FILE *file)
1999 {
2000 basic_block bb;
2001
2002 fprintf (file, "Undefined regs:\n");
2003
2004 FOR_ALL_BB (bb)
2005 {
2006 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
2007 df_print_bb_index (bb, file);
2008
2009 if (! bb_info->in)
2010 continue;
2011
2012 fprintf (file, " in \t");
2013 dump_bitmap (file, bb_info->in);
2014 fprintf (file, " gen \t");
2015 dump_bitmap (file, bb_info->gen);
2016 fprintf (file, " kill\t");
2017 dump_bitmap (file, bb_info->kill);
2018 fprintf (file, " out \t");
2019 dump_bitmap (file, bb_info->out);
2020 }
2021 }
2022
/* All of the information associated with every instance of the problem.
   UR depends on LR because df_ur_local_finalize trims the UR solution
   with the LR in/out sets.  */

static struct df_problem problem_UR =
{
  DF_UR,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_ur_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_ur_free_bb_info,         /* Free basic block info.  */
  df_ur_local_compute,        /* Local compute function.  */
  df_ur_init,                 /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_ur_confluence_n,         /* Confluence operator n.  */
  df_ur_transfer_function,    /* Transfer function.  */
  df_ur_local_finalize,       /* Finalize function.  */
  df_ur_free,                 /* Free all of the problem information.  */
  df_ur_dump,                 /* Debugging.  */
  &problem_LR                 /* Dependent problem.  */
};
2043
2044
2045 /* Create a new DATAFLOW instance and add it to an existing instance
2046 of DF. The returned structure is what is used to get at the
2047 solution. */
2048
2049 struct dataflow *
2050 df_ur_add_problem (struct df *df)
2051 {
2052 return df_add_problem (df, &problem_UR);
2053 }
2054
2055
2056 \f
2057 /*----------------------------------------------------------------------------
2058 UNINITIALIZED REGISTERS WITH EARLYCLOBBER
2059
   Find the set of uses for registers that are reachable from the entry
   block without passing through a definition.
2062
2063 This is a variant of the UR problem above that has a lot of special
2064 features just for the register allocation phase.
2065 ----------------------------------------------------------------------------*/
2066
/* Problem-wide data for the UREC (UR with earlyclobber) problem.  */

struct df_urec_problem_data
{
  bool earlyclobbers_found;     /* True if any instruction contains an
                                   earlyclobber.  */
#ifdef STACK_REGS
  bitmap stack_regs;            /* Registers that may be allocated to a STACK_REGS.  */
#endif
};
2075
2076
2077 /* Get basic block info. */
2078
2079 struct df_urec_bb_info *
2080 df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2081 {
2082 return (struct df_urec_bb_info *) dflow->block_info[index];
2083 }
2084
2085
/* Set basic block info.  Record BB_INFO as the UREC-problem data for
   block INDEX.  */

static void
df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
                     struct df_urec_bb_info *bb_info)
{
  dflow->block_info[index] = bb_info;
}
2094
2095
2096 /* Free basic block info. */
2097
2098 static void
2099 df_urec_free_bb_info (struct dataflow *dflow,
2100 basic_block bb ATTRIBUTE_UNUSED,
2101 void *vbb_info)
2102 {
2103 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2104 if (bb_info)
2105 {
2106 BITMAP_FREE (bb_info->gen);
2107 BITMAP_FREE (bb_info->kill);
2108 BITMAP_FREE (bb_info->in);
2109 BITMAP_FREE (bb_info->out);
2110 BITMAP_FREE (bb_info->earlyclobber);
2111 pool_free (dflow->block_pool, bb_info);
2112 }
2113 }
2114
2115
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

static void
df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
{
  unsigned int bb_index;
  bitmap_iterator bi;
  struct df_urec_problem_data *problem_data =
    (struct df_urec_problem_data *) dflow->problem_data;

  if (! dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_urec_block pool",
                                           sizeof (struct df_urec_bb_info), 50);

  /* Lazily create the problem-wide data the first time through.  */
  if (!dflow->problem_data)
    {
      problem_data = xmalloc (sizeof (struct df_urec_problem_data));
      dflow->problem_data = problem_data;
    }
  /* Earlyclobber detection starts from scratch on every (re)scan.  */
  problem_data->earlyclobbers_found = false;

  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
      if (bb_info)
        {
          /* Reuse the existing record: reset the local sets, keep the
             in/out solution bits.  */
          bitmap_clear (bb_info->kill);
          bitmap_clear (bb_info->gen);
          bitmap_clear (bb_info->earlyclobber);
        }
      else
        {
          bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
          df_urec_set_bb_info (dflow, bb_index, bb_info);
          bb_info->kill = BITMAP_ALLOC (NULL);
          bb_info->gen = BITMAP_ALLOC (NULL);
          bb_info->in = BITMAP_ALLOC (NULL);
          bb_info->out = BITMAP_ALLOC (NULL);
          bb_info->earlyclobber = BITMAP_ALLOC (NULL);
        }
    }
}
2165
2166
/* The function modifies local info for register REG being changed in
   SETTER.  DATA is used to pass the current basic block info.  */

static void
df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
{
  int regno;
  int endregno;
  int i;
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;

  /* Look through a SUBREG to the register inside it.  */
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;


  endregno = regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* A hard register in a wide mode spans several consecutive
         register numbers; mark the whole span.  */
      endregno += hard_regno_nregs[regno][GET_MODE (reg)];

      for (i = regno; i < endregno; i++)
        {
          bitmap_set_bit (bb_info->kill, i);

          /* A CLOBBER kills the register without generating a value.  */
          if (GET_CODE (setter) != CLOBBER)
            bitmap_set_bit (bb_info->gen, i);
          else
            bitmap_clear_bit (bb_info->gen, i);
        }
    }
  else
    {
      /* A pseudo occupies exactly one register number.  */
      bitmap_set_bit (bb_info->kill, regno);

      if (GET_CODE (setter) != CLOBBER)
        bitmap_set_bit (bb_info->gen, regno);
      else
        bitmap_clear_bit (bb_info->gen, regno);
    }
}
/* Classes of registers which could be early clobbered in the current
   insn.  */

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);

/* Scratch vector filled by df_urec_check_earlyclobber; never holds
   more than N_REG_CLASSES entries.  */
static VEC(int,heap) *earlyclobber_regclass;
2217
2218 /* This function finds and stores register classes that could be early
2219 clobbered in INSN. If any earlyclobber classes are found, the function
2220 returns TRUE, in all other cases it returns FALSE. */
2221
static bool
df_urec_check_earlyclobber (rtx insn)
{
  int opno;
  bool found = false;

  extract_insn (insn);

  /* Start each insn with an empty set of earlyclobber classes.  */
  VEC_truncate (int, earlyclobber_regclass, 0);
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      char c;
      bool amp_p;
      int i;
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      class = NO_REGS;
      amp_p = false;
      /* Walk the constraint string; each alternative ends at ',' or
	 at the terminating NUL.  */
      for (;;)
	{
	  c = *p;
	  switch (c)
	    {
	    case '=': case '+': case '?':
	    case '#': case '!':
	    case '*': case '%':
	    case 'm': case '<': case '>': case 'V': case 'o':
	    case 'E': case 'F': case 'G': case 'H':
	    case 's': case 'i': case 'n':
	    case 'I': case 'J': case 'K': case 'L':
	    case 'M': case 'N': case 'O': case 'P':
	    case 'X':
	    case '0': case '1': case '2': case '3': case '4':
	    case '5': case '6': case '7': case '8': case '9':
	      /* These don't say anything we care about.  */
	      break;

	    case '&':
	      amp_p = true;
	      break;
	    case '\0':
	    case ',':
	      /* End of an alternative: if it was marked earlyclobber
		 ('&') and names a register class, record that class
		 once.  */
	      if (amp_p && class != NO_REGS)
		{
		  int rc;

		  found = true;
		  for (i = 0;
		       VEC_iterate (int, earlyclobber_regclass, i, rc);
		       i++)
		    {
		      if (rc == (int) class)
			goto found_rc;
		    }

		  /* We use VEC_quick_push here because
		     earlyclobber_regclass holds no more than
		     N_REG_CLASSES elements. */
		  VEC_quick_push (int, earlyclobber_regclass, (int) class);
		found_rc:
		  ;
		}

	      /* Reset for the next alternative.  */
	      amp_p = false;
	      class = NO_REGS;
	      break;

	    case 'r':
	      class = GENERAL_REGS;
	      break;

	    default:
	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
	      break;
	    }
	  if (c == '\0')
	    break;
	  p += CONSTRAINT_LEN (c, p);
	}
    }

  return found;
}
2306
2307 /* The function checks that pseudo-register *X has a class
2308 intersecting with the class of pseudo-register could be early
2309 clobbered in the same insn.
2310
2311 This function is a no-op if earlyclobber_regclass is empty.
2312
2313 Reload can assign the same hard register to uninitialized
2314 pseudo-register and early clobbered pseudo-register in an insn if
2315 the pseudo-register is used first time in given BB and not lived at
2316 the BB start. To prevent this we don't change life information for
2317 such pseudo-registers. */
2318
static int
df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
{
  enum reg_class pref_class, alt_class;
  int i, regno;
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;

  if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
    {
      int rc;

      regno = REGNO (*x);
      /* Pseudos already defined or killed in this block are not used
	 for the first time here; leave them alone.  */
      if (bitmap_bit_p (bb_info->kill, regno)
	  || bitmap_bit_p (bb_info->gen, regno))
	return 0;
      pref_class = reg_preferred_class (regno);
      alt_class = reg_alternate_class (regno);
      /* Mark the pseudo if either of its allocation classes overlaps
	 any class that may be early clobbered in this insn.  */
      for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
	{
	  if (reg_classes_intersect_p (rc, pref_class)
	      || (rc != NO_REGS
		  && reg_classes_intersect_p (rc, alt_class)))
	    {
	      bitmap_set_bit (bb_info->earlyclobber, regno);
	      break;
	    }
	}
    }
  return 0;
}
2349
2350 /* The function processes all pseudo-registers in *X with the aid of
2351 previous function. */
2352
static void
df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
{
  /* Walk every sub-rtx of *X; DATA is the block's df_urec_bb_info.  */
  for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
}
2358
2359
2360 /* Compute local uninitialized register info for basic block BB. */
2361
static void
df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
{
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
  rtx insn;
  struct df_ref *def;

  /* Artificial defs at the top of the block count as initializations
     before any real insn executes.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
      {
	unsigned int regno = DF_REF_REGNO (def);
	bitmap_set_bit (bb_info->gen, regno);
      }

  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
	  /* Earlyclobber analysis is only meaningful when allocation
	     information is available.  */
	  if (df_state & (DF_SCAN_GLOBAL | DF_SCAN_POST_ALLOC)
	      && df_urec_check_earlyclobber (insn))
	    {
	      struct df_urec_problem_data *problem_data =
		(struct df_urec_problem_data *) dflow->problem_data;
	      problem_data->earlyclobbers_found = true;
	      note_uses (&PATTERN (insn),
			 df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
	    }
	}
    }

  /* Artificial defs at the bottom of the block.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
      {
	unsigned int regno = DF_REF_REGNO (def);
	bitmap_set_bit (bb_info->gen, regno);
      }

}
2403
2404
2405 /* Compute local uninitialized register info. */
2406
static void
df_urec_local_compute (struct dataflow *dflow,
		       bitmap all_blocks ATTRIBUTE_UNUSED,
		       bitmap rescan_blocks)
{
  unsigned int bb_index;
  bitmap_iterator bi;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET zero, stack_hard_regs, used;
  struct df_urec_problem_data *problem_data =
    (struct df_urec_problem_data *) dflow->problem_data;

  /* Any register that MAY be allocated to a register stack (like the
     387) is treated poorly.  Each such register is marked as being
     live everywhere.  This keeps the register allocator and the
     subsequent passes from doing anything useful with these values.

     FIXME: This seems like an incredibly poor idea.  */

  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT (stack_hard_regs, i);
  problem_data->stack_regs = BITMAP_ALLOC (NULL);
  /* Collect every pseudo whose preferred or alternate class can
     overlap the stack registers.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      /* GO_IF_HARD_REG_EQUAL jumps to `skip' when the pseudo cannot
	 land in a stack register.  */
      GO_IF_HARD_REG_EQUAL (used, zero, skip);
      bitmap_set_bit (problem_data->stack_regs, i);
    skip:
      ;
    }
#endif

  /* We know that earlyclobber_regclass holds no more than
     N_REG_CLASSES elements.  See df_urec_check_earlyclobber.  */
  earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
    {
      df_urec_bb_local_compute (dflow, bb_index);
    }

  VEC_free (int, heap, earlyclobber_regclass);
}
2455
2456
2457 /* Initialize the solution vectors. */
2458
2459 static void
2460 df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2461 {
2462 unsigned int bb_index;
2463 bitmap_iterator bi;
2464
2465 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2466 {
2467 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2468
2469 bitmap_copy (bb_info->out, bb_info->gen);
2470 bitmap_clear (bb_info->in);
2471 }
2472 }
2473
2474
2475 /* Or in the stack regs, hard regs and early clobber regs into the the
2476 ur_in sets of all of the blocks. */
2477
static void
df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
{
  struct df *df = dflow->df;
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned int bb_index;
  struct df_urec_problem_data *problem_data =
    (struct df_urec_problem_data *) dflow->problem_data;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
    {
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);

      /* The entry and exit blocks get no earlyclobber or stack-reg
	 adjustments.  */
      if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
	{
	  if (problem_data->earlyclobbers_found)
	    bitmap_ior_into (bb_info->in, bb_info->earlyclobber);

#ifdef STACK_REGS
	  /* We can not use the same stack register for uninitialized
	     pseudo-register and another living pseudo-register
	     because if the uninitialized pseudo-register dies,
	     subsequent pass reg-stack will be confused (it will
	     believe that the other register dies).  */
	  bitmap_ior_into (bb_info->in, problem_data->stack_regs);
	  bitmap_ior_into (bb_info->out, problem_data->stack_regs);
#endif
	}

      /* No register may reach a location where it is not used.  Thus
	 we trim the rr result to the places where it is used.  */
      bitmap_and_into (bb_info->in, bb_lr_info->in);
      bitmap_and_into (bb_info->out, bb_lr_info->out);

#if 1
      /* Hard registers may still stick in the ur_out set, but not
	 be in the ur_in set, if their only mention was in a call
	 in this block.  This is because a call kills in the lr
	 problem but does not kill in the rr problem.  To clean
	 this up, we execute the transfer function on the lr_in
	 set and then use that to knock bits out of ur_out.  */
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
			    bb_info->kill);
      bitmap_and_into (bb_info->out, tmp);
#endif
    }

#ifdef STACK_REGS
  BITMAP_FREE (problem_data->stack_regs);
#endif
  BITMAP_FREE (tmp);
}
2533
2534
2535 /* Confluence function that ignores fake edges. */
2536
2537 static void
2538 df_urec_confluence_n (struct dataflow *dflow, edge e)
2539 {
2540 bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2541 bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2542
2543 if (e->flags & EDGE_FAKE)
2544 return;
2545
2546 bitmap_ior_into (op1, op2);
2547 }
2548
2549
2550 /* Transfer function. */
2551
2552 static bool
2553 df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2554 {
2555 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2556 bitmap in = bb_info->in;
2557 bitmap out = bb_info->out;
2558 bitmap gen = bb_info->gen;
2559 bitmap kill = bb_info->kill;
2560
2561 return bitmap_ior_and_compl (out, gen, in, kill);
2562 }
2563
2564
2565 /* Free all storage associated with the problem. */
2566
2567 static void
2568 df_urec_free (struct dataflow *dflow)
2569 {
2570 if (dflow->block_info)
2571 {
2572 unsigned int i;
2573
2574 for (i = 0; i < dflow->block_info_size; i++)
2575 {
2576 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
2577 if (bb_info)
2578 {
2579 BITMAP_FREE (bb_info->gen);
2580 BITMAP_FREE (bb_info->kill);
2581 BITMAP_FREE (bb_info->in);
2582 BITMAP_FREE (bb_info->out);
2583 BITMAP_FREE (bb_info->earlyclobber);
2584 }
2585 }
2586
2587 free_alloc_pool (dflow->block_pool);
2588
2589 dflow->block_info_size = 0;
2590 free (dflow->block_info);
2591 free (dflow->problem_data);
2592 }
2593 free (dflow);
2594 }
2595
2596
2597 /* Debugging info. */
2598
2599 static void
2600 df_urec_dump (struct dataflow *dflow, FILE *file)
2601 {
2602 basic_block bb;
2603
2604 fprintf (file, "Undefined regs:\n");
2605
2606 FOR_ALL_BB (bb)
2607 {
2608 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
2609 df_print_bb_index (bb, file);
2610
2611 if (! bb_info->in)
2612 continue;
2613
2614 fprintf (file, " in \t");
2615 dump_bitmap (file, bb_info->in);
2616 fprintf (file, " gen \t");
2617 dump_bitmap (file, bb_info->gen);
2618 fprintf (file, " kill\t");
2619 dump_bitmap (file, bb_info->kill);
2620 fprintf (file, " ec\t");
2621 dump_bitmap (file, bb_info->earlyclobber);
2622 fprintf (file, " out \t");
2623 dump_bitmap (file, bb_info->out);
2624 }
2625 }
2626
2627 /* All of the information associated with every instance of the problem. */
2628
static struct df_problem problem_UREC =
{
  DF_UREC,                    /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_urec_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_urec_free_bb_info,       /* Free basic block info.  */
  df_urec_local_compute,      /* Local compute function.  */
  df_urec_init,               /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_urec_confluence_n,       /* Confluence operator n.  */
  df_urec_transfer_function,  /* Transfer function.  */
  df_urec_local_finalize,     /* Finalize function.  */
  df_urec_free,               /* Free all of the problem information.  */
  df_urec_dump,               /* Debugging.  */
  &problem_LR                 /* Dependent problem: live registers.  */
};
2647
2648
2649 /* Create a new DATAFLOW instance and add it to an existing instance
2650 of DF. The returned structure is what is used to get at the
2651 solution. */
2652
struct dataflow *
df_urec_add_problem (struct df *df)
{
  /* problem_UREC needs no up-front problem data; df_urec_alloc
     creates it lazily.  */
  return df_add_problem (df, &problem_UREC);
}
2658
2659
2660 \f
2661 /*----------------------------------------------------------------------------
2662 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
2663
2664 Link either the defs to the uses and / or the uses to the defs.
2665
2666 These problems are set up like the other dataflow problems so that
2667 they nicely fit into the framework. They are much simpler and only
2668 involve a single traversal of instructions and an examination of
2669 the reaching defs information (the dependent problem).
2670 ----------------------------------------------------------------------------*/
2671
struct df_chain_problem_data
{
  int flags;	/* Mask of DF_DU_CHAIN / DF_UD_CHAIN selecting which
		   chain direction(s) to build.  */
};
2676
2677
2678 /* Create def-use or use-def chains. */
2679
2680 static void
2681 df_chain_alloc (struct dataflow *dflow,
2682 bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2683 {
2684 struct df *df = dflow->df;
2685 unsigned int i;
2686 struct df_chain_problem_data *problem_data =
2687 (struct df_chain_problem_data *) dflow->problem_data;
2688
2689 /* Wholesale destruction of the old chains. */
2690 if (dflow->block_pool)
2691 free_alloc_pool (dflow->block_pool);
2692
2693 dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
2694 sizeof (struct df_link), 100);
2695
2696 if (problem_data->flags & DF_DU_CHAIN)
2697 {
2698 if (!df->def_info.refs_organized)
2699 df_reorganize_refs (&df->def_info);
2700
2701 /* Clear out the pointers from the refs. */
2702 for (i = 0; i < DF_DEFS_SIZE (df); i++)
2703 {
2704 struct df_ref *ref = df->def_info.refs[i];
2705 DF_REF_CHAIN (ref) = NULL;
2706 }
2707 }
2708
2709 if (problem_data->flags & DF_UD_CHAIN)
2710 {
2711 if (!df->use_info.refs_organized)
2712 df_reorganize_refs (&df->use_info);
2713 for (i = 0; i < DF_USES_SIZE (df); i++)
2714 {
2715 struct df_ref *ref = df->use_info.refs[i];
2716 DF_REF_CHAIN (ref) = NULL;
2717 }
2718 }
2719 }
2720
2721
2722 /* Reset all def_use and use_def chains in INSN. */
2723
2724 static void
2725 df_chain_insn_reset (struct dataflow *dflow, rtx insn)
2726 {
2727 struct df *df = dflow->df;
2728 struct df_chain_problem_data *problem_data =
2729 (struct df_chain_problem_data *) dflow->problem_data;
2730 unsigned int uid = INSN_UID (insn);
2731 struct df_insn_info *insn_info = NULL;
2732 struct df_ref *ref;
2733
2734 if (uid < df->insns_size)
2735 insn_info = DF_INSN_UID_GET (df, uid);
2736
2737 if (insn_info)
2738 {
2739 if (problem_data->flags & DF_DU_CHAIN)
2740 {
2741 ref = insn_info->defs;
2742 while (ref)
2743 {
2744 ref->chain = NULL;
2745 ref = ref->next_ref;
2746 }
2747 }
2748
2749 if (problem_data->flags & DF_UD_CHAIN)
2750 {
2751 ref = insn_info->uses;
2752 while (ref)
2753 {
2754 ref->chain = NULL;
2755 ref = ref->next_ref;
2756 }
2757 }
2758 }
2759 }
2760
2761
2762 /* Reset all def_use and use_def chains in basic block. */
2763
2764 static void
2765 df_chain_bb_reset (struct dataflow *dflow, unsigned int bb_index)
2766 {
2767 struct df *df = dflow->df;
2768 struct df_chain_problem_data *problem_data =
2769 (struct df_chain_problem_data *) dflow->problem_data;
2770 rtx insn;
2771 basic_block bb = BASIC_BLOCK (bb_index);
2772
2773 /* Some one deleted the basic block out from under us. */
2774 if (!bb)
2775 return;
2776
2777 FOR_BB_INSNS (bb, insn)
2778 {
2779 if (INSN_P (insn))
2780 {
2781 /* Record defs within INSN. */
2782 df_chain_insn_reset (dflow, insn);
2783 }
2784 }
2785
2786 /* Get rid of any chains in artifical uses or defs. */
2787 if (problem_data->flags & DF_DU_CHAIN)
2788 {
2789 struct df_ref *def;
2790 def = df_get_artificial_defs (df, bb_index);
2791 while (def)
2792 {
2793 def->chain = NULL;
2794 def = def->next_ref;
2795 }
2796 }
2797
2798 if (problem_data->flags & DF_UD_CHAIN)
2799 {
2800 struct df_ref *use;
2801 use = df_get_artificial_uses (df, bb_index);
2802 while (use)
2803 {
2804 use->chain = NULL;
2805 use = use->next_ref;
2806 }
2807 }
2808 }
2809
2810
2811 /* Reset all of the chains when the set of basic blocks changes. */
2812
2813
2814 static void
2815 df_chain_reset (struct dataflow *dflow, bitmap blocks_to_clear)
2816 {
2817 bitmap_iterator bi;
2818 unsigned int bb_index;
2819
2820 EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear, 0, bb_index, bi)
2821 {
2822 df_chain_bb_reset (dflow, bb_index);
2823 }
2824
2825 free_alloc_pool (dflow->block_pool);
2826 dflow->block_pool = NULL;
2827 }
2828
2829
2830 /* Create the chains for a list of USEs. */
2831
static void
df_chain_create_bb_process_use (struct dataflow *dflow,
				struct df_chain_problem_data *problem_data,
				bitmap local_rd,
				struct df_ref *use,
				enum df_ref_flags top_flag)
{
  struct df *df = dflow->df;
  bitmap_iterator bi;
  unsigned int def_index;

  while (use)
    {
      /* Do not want to go through this for an uninitialized var.  */
      unsigned int uregno = DF_REF_REGNO (use);
      int count = DF_REG_DEF_GET (df, uregno)->n_refs;
      if (count)
	{
	  /* Only process uses whose AT_TOP flag matches TOP_FLAG, so
	     top-of-block and in-block uses can be handled in separate
	     passes.  */
	  if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
	    {
	      /* All defs of a register occupy a contiguous range of
		 def ids starting at `begin'.  */
	      unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
	      unsigned int last_index = first_index + count - 1;

	      /* Link the use to every reaching def of the same
		 register.  */
	      EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
		{
		  struct df_ref *def;
		  if (def_index > last_index)
		    break;

		  def = DF_DEFS_GET (df, def_index);
		  if (problem_data->flags & DF_DU_CHAIN)
		    df_chain_create (dflow, def, use);
		  if (problem_data->flags & DF_UD_CHAIN)
		    df_chain_create (dflow, use, def);
		}
	    }
	}
      use = use->next_ref;
    }
}
2872
/* Reset the storage pool that the def-use or use-def chains have been
   allocated in.  We do not need to readjust the pointers in the refs;
   those have already been cleaned out.  */
2876
2877 /* Create chains from reaching defs bitmaps for basic block BB. */
2878 static void
2879 df_chain_create_bb (struct dataflow *dflow,
2880 struct dataflow *rd_dflow,
2881 unsigned int bb_index)
2882 {
2883 basic_block bb = BASIC_BLOCK (bb_index);
2884 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
2885 rtx insn;
2886 bitmap cpy = BITMAP_ALLOC (NULL);
2887 struct df *df = dflow->df;
2888 struct df_chain_problem_data *problem_data =
2889 (struct df_chain_problem_data *) dflow->problem_data;
2890 struct df_ref *def;
2891
2892 bitmap_copy (cpy, bb_info->in);
2893
2894 /* Since we are going forwards, process the artificial uses first
2895 then the artificial defs second. */
2896
2897 #ifdef EH_USES
2898 /* Create the chains for the artificial uses from the EH_USES at the
2899 beginning of the block. */
2900 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2901 df_get_artificial_uses (df, bb->index),
2902 DF_REF_AT_TOP);
2903 #endif
2904
2905 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2906 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
2907 {
2908 unsigned int dregno = DF_REF_REGNO (def);
2909 bitmap_clear_range (cpy,
2910 DF_REG_DEF_GET (df, dregno)->begin,
2911 DF_REG_DEF_GET (df, dregno)->n_refs);
2912 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2913 bitmap_set_bit (cpy, DF_REF_ID (def));
2914 }
2915
2916 /* Process the regular instructions next. */
2917 FOR_BB_INSNS (bb, insn)
2918 {
2919 struct df_ref *def;
2920 unsigned int uid = INSN_UID (insn);
2921
2922 if (! INSN_P (insn))
2923 continue;
2924
2925 /* Now scan the uses and link them up with the defs that remain
2926 in the cpy vector. */
2927
2928 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2929 DF_INSN_UID_GET (df, uid)->uses, 0);
2930
2931 /* Since we are going forwards, process the defs second. This
2932 pass only changes the bits in cpy. */
2933 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
2934 {
2935 unsigned int dregno = DF_REF_REGNO (def);
2936 bitmap_clear_range (cpy,
2937 DF_REG_DEF_GET (df, dregno)->begin,
2938 DF_REG_DEF_GET (df, dregno)->n_refs);
2939 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2940 bitmap_set_bit (cpy, DF_REF_ID (def));
2941 }
2942 }
2943
2944 /* Create the chains for the artificial uses of the hard registers
2945 at the end of the block. */
2946 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2947 df_get_artificial_uses (df, bb->index), 0);
2948 }
2949
2950 /* Create def-use chains from reaching use bitmaps for basic blocks
2951 in BLOCKS. */
2952
2953 static void
2954 df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
2955 {
2956 unsigned int bb_index;
2957 bitmap_iterator bi;
2958 struct df *df = dflow->df;
2959 struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
2960
2961 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2962 {
2963 df_chain_create_bb (dflow, rd_dflow, bb_index);
2964 }
2965 }
2966
2967
2968 /* Free all storage associated with the problem. */
2969
static void
df_chain_free (struct dataflow *dflow)
{
  /* Freeing the pool releases every df_link at once.  */
  free_alloc_pool (dflow->block_pool);
  free (dflow->problem_data);
  free (dflow);
}
2977
2978
2979 /* Debugging info. */
2980
2981 static void
2982 df_chains_dump (struct dataflow *dflow, FILE *file)
2983 {
2984 struct df *df = dflow->df;
2985 unsigned int j;
2986 struct df_chain_problem_data *problem_data =
2987 (struct df_chain_problem_data *) dflow->problem_data;
2988
2989 if (problem_data->flags & DF_DU_CHAIN)
2990 {
2991 fprintf (file, "Def-use chains:\n");
2992 for (j = 0; j < df->def_info.bitmap_size; j++)
2993 {
2994 struct df_ref *def = DF_DEFS_GET (df, j);
2995 if (def)
2996 {
2997 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
2998 j, DF_REF_BBNO (def),
2999 DF_INSN_LUID (df, DF_REF_INSN (def)),
3000 DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
3001 DF_REF_REGNO (def));
3002 if (def->flags & DF_REF_READ_WRITE)
3003 fprintf (file, "read/write ");
3004 df_chain_dump (df, DF_REF_CHAIN (def), file);
3005 fprintf (file, "\n");
3006 }
3007 }
3008 }
3009
3010 if (problem_data->flags & DF_UD_CHAIN)
3011 {
3012 fprintf (file, "Use-def chains:\n");
3013 for (j = 0; j < df->use_info.bitmap_size; j++)
3014 {
3015 struct df_ref *use = DF_USES_GET (df, j);
3016 if (use)
3017 {
3018 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3019 j, DF_REF_BBNO (use),
3020 DF_REF_INSN (use) ?
3021 DF_INSN_LUID (df, DF_REF_INSN (use))
3022 : -1,
3023 DF_REF_INSN (DF_USES_GET (df, j)) ?
3024 DF_REF_INSN_UID (DF_USES_GET (df,j))
3025 : -1,
3026 DF_REF_REGNO (use));
3027 if (use->flags & DF_REF_READ_WRITE)
3028 fprintf (file, "read/write ");
3029 if (use->flags & DF_REF_STRIPPED)
3030 fprintf (file, "stripped ");
3031 if (use->flags & DF_REF_IN_NOTE)
3032 fprintf (file, "note ");
3033 df_chain_dump (df, DF_REF_CHAIN (use), file);
3034 fprintf (file, "\n");
3035 }
3036 }
3037 }
3038 }
3039
3040
static struct df_problem problem_CHAIN =
{
  DF_CHAIN,             /* Problem id.  */
  DF_NONE,              /* Direction: not an iterative problem.  */
  df_chain_alloc,       /* Allocate the problem specific data.  */
  df_chain_reset,       /* Reset global information.  */
  NULL,                 /* Free basic block info.  */
  NULL,                 /* Local compute function.  */
  NULL,                 /* Init the solution specific data.  */
  NULL,                 /* Iterative solver.  */
  NULL,                 /* Confluence operator 0.  */
  NULL,                 /* Confluence operator n.  */
  NULL,                 /* Transfer function.  */
  df_chain_finalize,    /* Finalize function.  */
  df_chain_free,        /* Free all of the problem information.  */
  df_chains_dump,       /* Debugging.  */
  &problem_RD           /* Dependent problem: reaching definitions.  */
};
3059
3060
3061 /* Create a new DATAFLOW instance and add it to an existing instance
3062 of DF. The returned structure is what is used to get at the
3063 solution. */
3064
3065 struct dataflow *
3066 df_chain_add_problem (struct df *df, int flags)
3067 {
3068 struct df_chain_problem_data *problem_data =
3069 xmalloc (sizeof (struct df_chain_problem_data));
3070 struct dataflow *dflow = df_add_problem (df, &problem_CHAIN);
3071
3072 dflow->problem_data = problem_data;
3073 problem_data->flags = flags;
3074
3075 return dflow;
3076 }
3077
3078
3079 /*----------------------------------------------------------------------------
3080 REGISTER INFORMATION
3081
3082 Currently this consists of only lifetime information. But the plan is
3083 to enhance it so that it produces all of the register information needed
3084 by the register allocators.
3085 ----------------------------------------------------------------------------*/
3086
3087
struct df_ri_problem_data
{
  int *lifetime;	/* Indexed by regno; number of insns over which
			   the register is live.  */
};
3092
3093
3094 /* Allocate the lifetime information. */
3095
3096 static void
3097 df_ri_alloc (struct dataflow *dflow, bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
3098 {
3099 struct df_ri_problem_data *problem_data =
3100 (struct df_ri_problem_data *) dflow->problem_data;
3101
3102 if (!dflow->problem_data)
3103 {
3104 struct df_ri_problem_data *problem_data =
3105 xmalloc (sizeof (struct df_ri_problem_data));
3106 dflow->problem_data = problem_data;
3107 }
3108
3109 problem_data->lifetime = xrealloc (problem_data->lifetime,
3110 max_reg_num () *sizeof (int));
3111 memset (problem_data->lifetime, 0, max_reg_num () *sizeof (int));
3112 }
3113
3114 /* Compute register info: lifetime, bb, and number of defs and uses
3115 for basic block BB. */
3116
static void
df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index, bitmap live)
{
  struct df *df = dflow->df;
  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
  struct df_ri_problem_data *problem_data =
    (struct df_ri_problem_data *) dflow->problem_data;
  basic_block bb = BASIC_BLOCK (bb_index);
  rtx insn;

  /* Start with the registers live at the end of the block and scan
     the insns backwards.  */
  bitmap_copy (live, bb_info->out);

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      unsigned int uid = INSN_UID (insn);
      unsigned int regno;
      bitmap_iterator bi;
      struct df_ref *def;
      struct df_ref *use;

      if (! INSN_P (insn))
	continue;

      for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
	{
	  unsigned int dregno = DF_REF_REGNO (def);

	  /* Kill this register.  */
	  bitmap_clear_bit (live, dregno);
	}

      for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
	{
	  unsigned int uregno = DF_REF_REGNO (use);

	  /* Walking backwards, a use of a register that is not yet
	     live is the last use before the register dies.  */
	  if (!bitmap_bit_p (live, uregno))
	    {
	      use->flags |= DF_REF_DIES_AFTER_THIS_USE;
	      /* This register is now live.  */
	      bitmap_set_bit (live, uregno);
	    }
	  else
	    use->flags &= ~DF_REF_DIES_AFTER_THIS_USE;
	}

      /* Increment lifetimes of all live registers.  */
      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
	{
	  problem_data->lifetime[regno]++;
	}
    }
}
3169
3170
3171 /* Compute register info: lifetime, bb, and number of defs and uses. */
3172 static void
3173 df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
3174 bitmap blocks_to_scan)
3175 {
3176 unsigned int bb_index;
3177 bitmap_iterator bi;
3178 bitmap live;
3179
3180 live = BITMAP_ALLOC (NULL);
3181
3182 EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
3183 {
3184 df_ri_bb_compute (dflow, bb_index, live);
3185 }
3186
3187 BITMAP_FREE (live);
3188 }
3189
3190
3191 /* Free all storage associated with the problem. */
3192
static void
df_ri_free (struct dataflow *dflow)
{
  struct df_ri_problem_data *problem_data =
    (struct df_ri_problem_data *) dflow->problem_data;

  /* Release the lifetime array, then the problem data, then the
     dataflow instance itself.  */
  free (problem_data->lifetime);
  free (dflow->problem_data);
  free (dflow);
}
3203
3204
3205 /* Debugging info. */
3206
3207 static void
3208 df_ri_dump (struct dataflow *dflow, FILE *file)
3209 {
3210 struct df_ri_problem_data *problem_data =
3211 (struct df_ri_problem_data *) dflow->problem_data;
3212 int j;
3213
3214 fprintf (file, "Register info:\n");
3215 for (j = 0; j < max_reg_num (); j++)
3216 {
3217 fprintf (file, "reg %d life %d\n", j, problem_data->lifetime[j]);
3218 }
3219 }
3220
3221 /* All of the information associated every instance of the problem. */
3222
static struct df_problem problem_RI =
{
  DF_RI,                /* Problem id.  */
  DF_NONE,              /* Direction: not an iterative problem.  */
  df_ri_alloc,          /* Allocate the problem specific data.  */
  NULL,                 /* Reset global information.  */
  NULL,                 /* Free basic block info.  */
  df_ri_compute,        /* Local compute function.  */
  NULL,                 /* Init the solution specific data.  */
  NULL,                 /* Iterative solver.  */
  NULL,                 /* Confluence operator 0.  */
  NULL,                 /* Confluence operator n.  */
  NULL,                 /* Transfer function.  */
  NULL,                 /* Finalize function.  */
  df_ri_free,           /* Free all of the problem information.  */
  df_ri_dump,           /* Debugging.  */
  &problem_UR           /* Dependent problem: uninitialized registers.  */
};
3241
3242
3243 /* Create a new DATAFLOW instance and add it to an existing instance
3244 of DF. The returned structure is what is used to get at the
3245 solution. */
3246
struct dataflow *
df_ri_add_problem (struct df *df)
{
  /* problem_RI's alloc function creates the problem data lazily.  */
  return df_add_problem (df, &problem_RI);
}
3252
3253
3254 /* Return total lifetime (in insns) of REG. */
3255 int
3256 df_reg_lifetime (struct df *df, rtx reg)
3257 {
3258 struct dataflow *dflow = df->problems_by_index[DF_RI];
3259 struct df_ri_problem_data *problem_data =
3260 (struct df_ri_problem_data *) dflow->problem_data;
3261 return problem_data->lifetime[REGNO (reg)];
3262 }
3263
3264
This page took 0.17634 seconds and 6 git commands to generate.