1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 2, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to the Free
23 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 02110-1301, USA.
25 */
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "rtl.h"
32 #include "tm_p.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "regs.h"
37 #include "output.h"
38 #include "alloc-pool.h"
39 #include "flags.h"
40 #include "hard-reg-set.h"
41 #include "basic-block.h"
42 #include "sbitmap.h"
43 #include "bitmap.h"
44 #include "timevar.h"
45 #include "tree.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "df.h"
49 #include "tree-pass.h"
50
51 #ifndef HAVE_epilogue
52 #define HAVE_epilogue 0
53 #endif
54 #ifndef HAVE_prologue
55 #define HAVE_prologue 0
56 #endif
57 #ifndef HAVE_sibcall_epilogue
58 #define HAVE_sibcall_epilogue 0
59 #endif
60
61 #ifndef EPILOGUE_USES
62 #define EPILOGUE_USES(REGNO) 0
63 #endif
64
65 /* The bitmap_obstack is used to hold some static variables that
66 should not be reset after each function is compiled. */
67
68 static bitmap_obstack persistent_obstack;
69
70 /* The set of hard registers in eliminables[i].from. */
71
72 static HARD_REG_SET elim_reg_set;
73
74 /* This is a bitmap copy of regs_invalidated_by_call so that we can
75 easily add it into bitmaps, etc. */
76
77 bitmap df_invalidated_by_call = NULL;
78
79 /* A collection of the refs found while scanning a single insn or
80 basic block, before they are canonized and installed. */
81
82 struct df_collection_rec
83 {
84 struct df_ref ** def_vec;
85 unsigned int next_def;
86 struct df_ref ** use_vec;
87 unsigned int next_use;
88 struct df_ref ** eq_use_vec;
89 unsigned int next_eq_use;
90 struct df_mw_hardreg **mw_vec;
91 unsigned int next_mw;
92 };
93
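/* For illustration: df_insn_rescan below fills a collection rec that
   lives on the stack, with vectors sized to generous upper bounds.  A
   minimal sketch of that pattern (the bounds are the ones used there,
   not hard limits of the representation):

     struct df_collection_rec collection_rec;
     collection_rec.def_vec = alloca (sizeof (struct df_ref *) * 1000);
     collection_rec.use_vec = alloca (sizeof (struct df_ref *) * 1000);
     collection_rec.eq_use_vec = alloca (sizeof (struct df_ref *) * 1000);
     collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg *) * 100);

   The next_* counters index the first free slot of each vector.  */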
94 static struct df_ref * df_null_ref_rec[1];
95 static struct df_mw_hardreg * df_null_mw_rec[1];
96
97 static void df_ref_record (struct df_collection_rec *,
98 rtx, rtx *,
99 basic_block, rtx, enum df_ref_type,
100 enum df_ref_flags);
101 static void df_def_record_1 (struct df_collection_rec *,
102 rtx, basic_block, rtx,
103 enum df_ref_flags);
104 static void df_defs_record (struct df_collection_rec *,
105 rtx, basic_block, rtx,
106 enum df_ref_flags);
107 static void df_uses_record (struct df_collection_rec *,
108 rtx *, enum df_ref_type,
109 basic_block, rtx, enum df_ref_flags);
110
111 static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
112 basic_block, rtx, enum df_ref_type,
113 enum df_ref_flags);
114
115 static void df_insn_refs_collect (struct df_collection_rec*,
116 basic_block, rtx);
117 static void df_canonize_collection_rec (struct df_collection_rec *);
118
119 static void df_get_regular_block_artificial_uses (bitmap);
120 static void df_get_eh_block_artificial_uses (bitmap);
121
122 static void df_record_entry_block_defs (bitmap);
123 static void df_record_exit_block_uses (bitmap);
124 static void df_get_exit_block_use_set (bitmap);
125 static void df_get_entry_block_def_set (bitmap);
126 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
127 static void df_ref_chain_delete_du_chain (struct df_ref **);
128 static void df_ref_chain_delete (struct df_ref **);
129
130 static void df_refs_add_to_chains (struct df_collection_rec *,
131 basic_block, rtx);
132
133 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
134 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
135 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
136 static void df_install_ref (struct df_ref *, struct df_reg_info *,
137 struct df_ref_info *, bool);
138
139 static int df_ref_compare (const void *, const void *);
140 static int df_mw_compare (const void *, const void *);
141
142 /* Indexed by hardware reg number, is true if that register is ever
143 used in the current function.
144
145 In df-scan.c, this is set up to record the hard regs used
146 explicitly. Reload adds in the hard regs used for holding pseudo
147 regs. Final uses it to generate the code in the function prologue
148 and epilogue to save and restore registers as needed. */
149
150 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
151 \f
152 /*----------------------------------------------------------------------------
153 SCANNING DATAFLOW PROBLEM
154
155 There are several ways in which scanning looks just like the other
156 dataflow problems. It shares all of the mechanisms for local info
157 as well as basic block info. Where it differs is when and how often
158 it gets run. It also has no need for the iterative solver.
159 ----------------------------------------------------------------------------*/
160
161 /* Problem data for the scanning dataflow function. */
162 struct df_scan_problem_data
163 {
164 alloc_pool ref_pool;
165 alloc_pool insn_pool;
166 alloc_pool reg_pool;
167 alloc_pool mw_reg_pool;
168 alloc_pool mw_link_pool;
169 bitmap_obstack reg_bitmaps;
170 bitmap_obstack insn_bitmaps;
171 };
172
173 typedef struct df_scan_bb_info *df_scan_bb_info_t;
174
175 static void
176 df_scan_free_internal (void)
177 {
178 struct df_scan_problem_data *problem_data
179 = (struct df_scan_problem_data *) df_scan->problem_data;
180
181 free (df->def_info.refs);
182 free (df->def_info.begin);
183 free (df->def_info.count);
184 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
185
186 free (df->use_info.refs);
187 free (df->use_info.begin);
188 free (df->use_info.count);
189 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
190
191 free (df->def_regs);
192 df->def_regs = NULL;
193 free (df->use_regs);
194 df->use_regs = NULL;
195 free (df->eq_use_regs);
196 df->eq_use_regs = NULL;
197 df->regs_size = 0;
198 DF_REG_SIZE(df) = 0;
199
200 free (df->insns);
201 df->insns = NULL;
202 DF_INSN_SIZE () = 0;
203
204 free (df_scan->block_info);
205 df_scan->block_info = NULL;
206 df_scan->block_info_size = 0;
207
208 BITMAP_FREE (df->hardware_regs_used);
209 BITMAP_FREE (df->regular_block_artificial_uses);
210 BITMAP_FREE (df->eh_block_artificial_uses);
211 BITMAP_FREE (df->entry_block_defs);
212 BITMAP_FREE (df->exit_block_uses);
213 BITMAP_FREE (df->insns_to_delete);
214 BITMAP_FREE (df->insns_to_rescan);
215 BITMAP_FREE (df->insns_to_notes_rescan);
216
217 free_alloc_pool (df_scan->block_pool);
218 free_alloc_pool (problem_data->ref_pool);
219 free_alloc_pool (problem_data->insn_pool);
220 free_alloc_pool (problem_data->reg_pool);
221 free_alloc_pool (problem_data->mw_reg_pool);
222 free_alloc_pool (problem_data->mw_link_pool);
223 bitmap_obstack_release (&problem_data->reg_bitmaps);
224 bitmap_obstack_release (&problem_data->insn_bitmaps);
225 free (df_scan->problem_data);
226 }
227
228
229 /* Set basic block info. */
230
231 static void
232 df_scan_set_bb_info (unsigned int index,
233 struct df_scan_bb_info *bb_info)
234 {
235 gcc_assert (df_scan);
236 df_grow_bb_info (df_scan);
237 df_scan->block_info[index] = (void *) bb_info;
238 }
239
240
241 /* Free basic block info. */
242
243 static void
244 df_scan_free_bb_info (basic_block bb, void *vbb_info)
245 {
246 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
247 unsigned int bb_index = bb->index;
248 if (bb_info)
249 {
250 rtx insn;
251 FOR_BB_INSNS (bb, insn)
252 {
253 if (INSN_P (insn))
254 /* Delete the scanning info for INSN. */
255 df_insn_delete (bb, INSN_UID (insn));
256 }
257
258 if (bb_index < df_scan->block_info_size)
259 bb_info = df_scan_get_bb_info (bb_index);
260
261 /* Get rid of any artificial uses or defs. */
262 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
263 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
264 df_ref_chain_delete (bb_info->artificial_defs);
265 df_ref_chain_delete (bb_info->artificial_uses);
266 bb_info->artificial_defs = NULL;
267 bb_info->artificial_uses = NULL;
268 pool_free (df_scan->block_pool, bb_info);
269 }
270 }
271
272
273 /* Allocate the problem data for the scanning problem. This should be
274 called when the problem is created or when the entire function is to
275 be rescanned. */
276 void
277 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
278 {
279 struct df_scan_problem_data *problem_data;
280 unsigned int insn_num = get_max_uid () + 1;
281 unsigned int block_size = 400;
282 basic_block bb;
283
284 /* Given the number of pools, this is really faster than tearing
285 everything apart. */
286 if (df_scan->problem_data)
287 df_scan_free_internal ();
288
289 df_scan->block_pool
290 = create_alloc_pool ("df_scan_block pool",
291 sizeof (struct df_scan_bb_info),
292 block_size);
293
294 problem_data = XNEW (struct df_scan_problem_data);
295 df_scan->problem_data = problem_data;
296 df_scan->computed = true;
297
298 problem_data->ref_pool
299 = create_alloc_pool ("df_scan_ref pool",
300 sizeof (struct df_ref), block_size);
301 problem_data->insn_pool
302 = create_alloc_pool ("df_scan_insn pool",
303 sizeof (struct df_insn_info), block_size);
304 problem_data->reg_pool
305 = create_alloc_pool ("df_scan_reg pool",
306 sizeof (struct df_reg_info), block_size);
307 problem_data->mw_reg_pool
308 = create_alloc_pool ("df_scan_mw_reg pool",
309 sizeof (struct df_mw_hardreg), block_size);
310 problem_data->mw_link_pool
311 = create_alloc_pool ("df_scan_mw_link pool",
312 sizeof (struct df_link), block_size);
313
314 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
315 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
316
317 insn_num += insn_num / 4;
318 df_grow_reg_info ();
319
320 df_grow_insn_info ();
321 df_grow_bb_info (df_scan);
322
323 FOR_ALL_BB (bb)
324 {
325 unsigned int bb_index = bb->index;
326 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
327 if (!bb_info)
328 {
329 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
330 df_scan_set_bb_info (bb_index, bb_info);
331 }
332 bb_info->artificial_defs = NULL;
333 bb_info->artificial_uses = NULL;
334 }
335
336 df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
337 df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
338 df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
339 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
340 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
341 df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
342 df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
343 df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
344 df_scan->optional_p = false;
345 }
346
347
348 /* Free all of the data associated with the scan problem. */
349
350 static void
351 df_scan_free (void)
352 {
353 if (df_scan->problem_data)
354 df_scan_free_internal ();
355
356 if (df->blocks_to_analyze)
357 {
358 BITMAP_FREE (df->blocks_to_analyze);
359 df->blocks_to_analyze = NULL;
360 }
361
362 free (df_scan);
363 }
364
365 /* Dump the preamble for DF_SCAN dump. */
366 static void
367 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
368 {
369 int i;
370
371 fprintf (file, ";; invalidated by call \t");
372 df_print_regset (file, df_invalidated_by_call);
373 fprintf (file, ";; hardware regs used \t");
374 df_print_regset (file, df->hardware_regs_used);
375 fprintf (file, ";; regular block artificial uses \t");
376 df_print_regset (file, df->regular_block_artificial_uses);
377 fprintf (file, ";; eh block artificial uses \t");
378 df_print_regset (file, df->eh_block_artificial_uses);
379 fprintf (file, ";; entry block defs \t");
380 df_print_regset (file, df->entry_block_defs);
381 fprintf (file, ";; exit block uses \t");
382 df_print_regset (file, df->exit_block_uses);
383 fprintf (file, ";; regs ever live \t");
384 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
385 if (df_regs_ever_live_p (i))
386 fprintf (file, " %d[%s]", i, reg_names[i]);
387
388 fprintf (file, "\n");
389 }
390
391 /* Dump the bb_info for a given basic block. */
392 static void
393 df_scan_start_block (basic_block bb, FILE *file)
394 {
395 struct df_scan_bb_info *bb_info
396 = df_scan_get_bb_info (bb->index);
397
398 if (bb_info)
399 {
400 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
401 df_refs_chain_dump (bb_info->artificial_defs, true, file);
402 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
403 df_refs_chain_dump (bb_info->artificial_uses, true, file);
404 fprintf (file, "\n");
405 }
406 #if 0
407 {
408 rtx insn;
409 FOR_BB_INSNS (bb, insn)
410 if (INSN_P (insn))
411 df_insn_debug (insn, false, file);
412 }
413 #endif
414 }
415
416 static struct df_problem problem_SCAN =
417 {
418 DF_SCAN, /* Problem id. */
419 DF_NONE, /* Direction. */
420 df_scan_alloc, /* Allocate the problem specific data. */
421 NULL, /* Reset global information. */
422 df_scan_free_bb_info, /* Free basic block info. */
423 NULL, /* Local compute function. */
424 NULL, /* Init the solution specific data. */
425 NULL, /* Iterative solver. */
426 NULL, /* Confluence operator 0. */
427 NULL, /* Confluence operator n. */
428 NULL, /* Transfer function. */
429 NULL, /* Finalize function. */
430 df_scan_free, /* Free all of the problem information. */
431 NULL, /* Remove this problem from the stack of dataflow problems. */
432 df_scan_start_dump, /* Debugging. */
433 df_scan_start_block, /* Debugging start block. */
434 NULL, /* Debugging end block. */
435 NULL, /* Incremental solution verify start. */
436 NULL, /* Incremental solution verify end. */
437 NULL, /* Dependent problem. */
438 TV_DF_SCAN, /* Timing variable. */
439 false /* Reset blocks on dropping out of blocks_to_analyze. */
440 };
441
442
443 /* Create a new DATAFLOW instance and add it to an existing instance
444 of DF. The returned structure is what is used to get at the
445 solution. */
446
447 void
448 df_scan_add_problem (void)
449 {
450 df_add_problem (&problem_SCAN);
451 }
452
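/* A typical client sequence, as a sketch (the surrounding driver code
   is assumed, not shown here): the scanning problem is added first,
   its data is allocated, and then the whole function is scanned.

     df_scan_add_problem ();
     df_scan_alloc (NULL);
     df_scan_blocks ();
*/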
453 \f
454 /*----------------------------------------------------------------------------
455 Storage Allocation Utilities
456 ----------------------------------------------------------------------------*/
457
458
459 /* First, grow the reg_info information. If the current size is less than
460 the number of pseudos, grow to 25% more than the number of
461 pseudos.
462
463 Second, assure that all of the slots up to max_reg_num have been
464 filled with reg_info structures. */
465
466 void
467 df_grow_reg_info (void)
468 {
469 unsigned int max_reg = max_reg_num ();
470 unsigned int new_size = max_reg;
471 struct df_scan_problem_data *problem_data
472 = (struct df_scan_problem_data *) df_scan->problem_data;
473 unsigned int i;
474
475 if (df->regs_size < new_size)
476 {
477 new_size += new_size / 4;
478 df->def_regs = xrealloc (df->def_regs,
479 new_size *sizeof (struct df_reg_info*));
480 df->use_regs = xrealloc (df->use_regs,
481 new_size *sizeof (struct df_reg_info*));
482 df->eq_use_regs = xrealloc (df->eq_use_regs,
483 new_size *sizeof (struct df_reg_info*));
484 df->def_info.begin = xrealloc (df->def_info.begin,
485 new_size *sizeof (int));
486 df->def_info.count = xrealloc (df->def_info.count,
487 new_size *sizeof (int));
488 df->use_info.begin = xrealloc (df->use_info.begin,
489 new_size *sizeof (int));
490 df->use_info.count = xrealloc (df->use_info.count,
491 new_size *sizeof (int));
492 df->regs_size = new_size;
493 }
494
495 for (i = df->regs_inited; i < max_reg; i++)
496 {
497 struct df_reg_info *reg_info;
498
499 reg_info = pool_alloc (problem_data->reg_pool);
500 memset (reg_info, 0, sizeof (struct df_reg_info));
501 df->def_regs[i] = reg_info;
502 reg_info = pool_alloc (problem_data->reg_pool);
503 memset (reg_info, 0, sizeof (struct df_reg_info));
504 df->use_regs[i] = reg_info;
505 reg_info = pool_alloc (problem_data->reg_pool);
506 memset (reg_info, 0, sizeof (struct df_reg_info));
507 df->eq_use_regs[i] = reg_info;
508 df->def_info.begin[i] = 0;
509 df->def_info.count[i] = 0;
510 df->use_info.begin[i] = 0;
511 df->use_info.count[i] = 0;
512 }
513
514 df->regs_inited = max_reg;
515 }
516
517
518 /* Grow the ref information. */
519
520 static void
521 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
522 {
523 if (ref_info->refs_size < new_size)
524 {
525 ref_info->refs = xrealloc (ref_info->refs,
526 new_size *sizeof (struct df_ref *));
527 memset (ref_info->refs + ref_info->refs_size, 0,
528 (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
529 ref_info->refs_size = new_size;
530 }
531 }
532
533
534 /* Check and grow the ref information if necessary. This routine
535 guarantees total_size + BITMAP_ADDEND amount of entries in refs
536 array. It updates ref_info->refs_size only and does not change
537 ref_info->total_size. */
538
539 static void
540 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
541 unsigned bitmap_addend)
542 {
543 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
544 {
545 int new_size = ref_info->total_size + bitmap_addend;
546 new_size += ref_info->total_size / 4;
547 df_grow_ref_info (ref_info, new_size);
548 }
549 }
550
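/* For example, with total_size == 100 and bitmap_addend == 1, the
   check above grows the refs array to 100 + 1 + 100/4 == 126 slots,
   so repeated small additions do not cause quadratic reallocation.  */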
551
552 /* Grow the ref information. If the current size is less than the
553 number of instructions, grow to 25% more than the number of
554 instructions. */
555
556 void
557 df_grow_insn_info (void)
558 {
559 unsigned int new_size = get_max_uid () + 1;
560 if (DF_INSN_SIZE () < new_size)
561 {
562 new_size += new_size / 4;
563 df->insns = xrealloc (df->insns,
564 new_size *sizeof (struct df_insn_info *));
565 memset (df->insns + df->insns_size, 0,
566 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
567 DF_INSN_SIZE () = new_size;
568 }
569 }
570
571
572
573 \f
574 /*----------------------------------------------------------------------------
575 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
576 ----------------------------------------------------------------------------*/
577
578 /* Rescan all of the blocks in blocks_to_analyze, or all of the blocks
579 in the function if blocks_to_analyze is NULL. */
580
581 void
582 df_scan_blocks (void)
583 {
584 basic_block bb;
585
586 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
587 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
588
589 df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
590 df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
591
592 bitmap_ior_into (df->eh_block_artificial_uses,
593 df->regular_block_artificial_uses);
594
595 /* ENTRY and EXIT blocks have special defs/uses. */
596 df_get_entry_block_def_set (df->entry_block_defs);
597 df_record_entry_block_defs (df->entry_block_defs);
598 df_get_exit_block_use_set (df->exit_block_uses);
599 df_record_exit_block_uses (df->exit_block_uses);
600 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
601 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
602
603 /* Regular blocks */
604 FOR_EACH_BB (bb)
605 {
606 unsigned int bb_index = bb->index;
607 df_bb_refs_record (bb_index, true);
608 }
609 }
610
611
612 /* Create a new ref of type DF_REF_TYPE for register REG at address
613 LOC within INSN of BB. */
614
615 struct df_ref *
616 df_ref_create (rtx reg, rtx *loc, rtx insn,
617 basic_block bb,
618 enum df_ref_type ref_type,
619 enum df_ref_flags ref_flags)
620 {
621 struct df_ref *ref;
622 struct df_reg_info **reg_info;
623 struct df_ref_info *ref_info;
624 struct df_ref **ref_rec;
625 struct df_ref ***ref_rec_ptr;
626 unsigned int count = 0;
627 bool add_to_table;
628
629 df_grow_reg_info ();
630
631 /* You cannot hack artificial refs. */
632 gcc_assert (insn);
633 ref = df_ref_create_structure (NULL, reg, loc, bb, insn,
634 ref_type, ref_flags);
635
636 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
637 {
638 reg_info = df->def_regs;
639 ref_info = &df->def_info;
640 ref_rec_ptr = &DF_INSN_DEFS (insn);
641 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
642 }
643 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
644 {
645 reg_info = df->eq_use_regs;
646 ref_info = &df->use_info;
647 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
648 switch (ref_info->ref_order)
649 {
650 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
651 case DF_REF_ORDER_BY_REG_WITH_NOTES:
652 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
653 add_to_table = true;
654 break;
655 default:
656 add_to_table = false;
657 break;
658 }
659 }
660 else
661 {
662 reg_info = df->use_regs;
663 ref_info = &df->use_info;
664 ref_rec_ptr = &DF_INSN_USES (insn);
665 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
666 }
667
668 /* Do not add if ref is not in the right blocks. */
669 if (add_to_table && df->analyze_subset)
670 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
671
672 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
673
674 if (add_to_table)
675 switch (ref_info->ref_order)
676 {
677 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
678 case DF_REF_ORDER_BY_REG_WITH_NOTES:
679 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
680 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
681 break;
682 default:
683 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
684 break;
685 }
686
687 ref_rec = *ref_rec_ptr;
688 while (*ref_rec)
689 {
690 count++;
691 ref_rec++;
692 }
693
694 ref_rec = *ref_rec_ptr;
695 if (count)
696 {
697 ref_rec = xrealloc (ref_rec, (count+2) * sizeof (struct df_ref*));
698 *ref_rec_ptr = ref_rec;
699 ref_rec[count] = ref;
700 ref_rec[count+1] = NULL;
701 qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
702 }
703 else
704 {
705 struct df_ref **ref_rec = XNEWVEC (struct df_ref*, 2);
706 ref_rec[0] = ref;
707 ref_rec[1] = NULL;
708 *ref_rec_ptr = ref_rec;
709 }
710
711 #if 0
712 if (dump_file)
713 {
714 fprintf (dump_file, "adding ref ");
715 df_ref_debug (ref, dump_file);
716 }
717 #endif
718 /* By adding the ref directly, df_insn_rescan may not find any
719 differences even though the block will have changed. So we need
720 to mark the block dirty ourselves. */
721 df_set_bb_dirty (bb);
722
723 return ref;
724 }
725
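/* An illustrative call (REGX, REGX_LOC and USE_INSN are hypothetical,
   not from this file): record that insn USE_INSN reads register REGX
   in block BB.

     struct df_ref *ref
       = df_ref_create (regx, &regx_loc, use_insn, bb,
                        DF_REF_REG_USE, 0);

   Here REGX_LOC is the slot inside USE_INSN's pattern that holds
   REGX, which is what LOC is expected to point at.  */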
726
727 \f
728 /*----------------------------------------------------------------------------
729 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
730 ----------------------------------------------------------------------------*/
731
732
733 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
734 Also delete the def-use or use-def chain if it exists. */
735
736 static void
737 df_reg_chain_unlink (struct df_ref *ref)
738 {
739 struct df_ref *next = DF_REF_NEXT_REG (ref);
740 struct df_ref *prev = DF_REF_PREV_REG (ref);
741 struct df_scan_problem_data *problem_data
742 = (struct df_scan_problem_data *) df_scan->problem_data;
743 int id = DF_REF_ID (ref);
744 struct df_reg_info *reg_info;
745 struct df_ref **refs = NULL;
746
747 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
748 {
749 reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
750 refs = df->def_info.refs;
751 }
752 else
753 {
754 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
755 {
756 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
757 switch (df->use_info.ref_order)
758 {
759 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
760 case DF_REF_ORDER_BY_REG_WITH_NOTES:
761 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
762 refs = df->use_info.refs;
763 break;
764 default:
765 break;
766 }
767 }
768 else
769 {
770 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
771 refs = df->use_info.refs;
772 }
773 }
774
775 if (refs)
776 {
777 if (df->analyze_subset)
778 {
779 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
780 refs[id] = NULL;
781 }
782 else
783 refs[id] = NULL;
784 }
785
786 /* Delete any def-use or use-def chains that start here. It is
787 possible that there is trash in this field. This happens for
788 insns that have been deleted when rescanning has been deferred
789 and the chain problem has also been deleted. The chain tear down
790 code skips deleted insns. */
791 if (df_chain && DF_REF_CHAIN (ref))
792 df_chain_unlink (ref);
793
794 reg_info->n_refs--;
795 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
796 {
797 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
798 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
799 }
800
801 /* Unlink from the reg chain. If there is no prev, this is the
802 first of the list. If not, just join the next and prev. */
803 if (prev)
804 DF_REF_NEXT_REG (prev) = next;
805 else
806 {
807 gcc_assert (reg_info->reg_chain == ref);
808 reg_info->reg_chain = next;
809 }
810 if (next)
811 DF_REF_PREV_REG (next) = prev;
812
813 pool_free (problem_data->ref_pool, ref);
814 }
815
816
817 /* Remove REF from VEC. */
818
819 static void
820 df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
821 {
822 struct df_ref **vec = *vec_ptr;
823
824 if (vec[1])
825 {
826 while (*vec && *vec != ref)
827 vec++;
828
829 while (*vec)
830 {
831 *vec = *(vec+1);
832 vec++;
833 }
834 }
835 else
836 {
837 free (vec);
838 *vec_ptr = df_null_ref_rec;
839 }
840 }
841
842
843 /* Unlink REF from all def-use/use-def chains, etc. */
844
845 void
846 df_ref_remove (struct df_ref *ref)
847 {
848 #if 0
849 if (dump_file)
850 {
851 fprintf (dump_file, "removing ref ");
852 df_ref_debug (ref, dump_file);
853 }
854 #endif
855
856 if (DF_REF_REG_DEF_P (ref))
857 {
858 if (DF_REF_IS_ARTIFICIAL (ref))
859 {
860 struct df_scan_bb_info *bb_info
861 = df_scan_get_bb_info (DF_REF_BB (ref)->index);
862 df_ref_compress_rec (&bb_info->artificial_defs, ref);
863 }
864 else
865 {
866 unsigned int uid = DF_REF_INSN_UID (ref);
867 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
868 df_ref_compress_rec (&insn_rec->defs, ref);
869 }
870 }
871 else
872 {
873 if (DF_REF_IS_ARTIFICIAL (ref))
874 {
875 struct df_scan_bb_info *bb_info
876 = df_scan_get_bb_info (DF_REF_BB (ref)->index);
877 df_ref_compress_rec (&bb_info->artificial_uses, ref);
878 }
879 else
880 {
881 unsigned int uid = DF_REF_INSN_UID (ref);
882 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
883
884 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
885 df_ref_compress_rec (&insn_rec->eq_uses, ref);
886 else
887 df_ref_compress_rec (&insn_rec->uses, ref);
888 }
889 }
890
891 /* By deleting the ref directly, df_insn_rescan may not find any
892 differences even though the block will have changed. So we need
893 to mark the block dirty ourselves. */
894 df_set_bb_dirty (DF_REF_BB (ref));
895 df_reg_chain_unlink (ref);
896 }
897
898
899 /* Create the insn record for INSN. If there was one there, zero it
900 out. */
901
902 struct df_insn_info *
903 df_insn_create_insn_record (rtx insn)
904 {
905 struct df_scan_problem_data *problem_data
906 = (struct df_scan_problem_data *) df_scan->problem_data;
907 struct df_insn_info *insn_rec;
908
909 df_grow_insn_info ();
910 insn_rec = DF_INSN_GET (insn);
911 if (!insn_rec)
912 {
913 insn_rec = pool_alloc (problem_data->insn_pool);
914 DF_INSN_SET (insn, insn_rec);
915 }
916 memset (insn_rec, 0, sizeof (struct df_insn_info));
917 insn_rec->insn = insn;
918 return insn_rec;
919 }
920
921
922 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
923
924 static void
925 df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
926 {
927 while (*ref_rec)
928 {
929 struct df_ref *ref = *ref_rec;
930 /* The du/ud chain is allocated by the df_chain problem, so it
931 must be torn down with df_chain_unlink. */
932 if (DF_REF_CHAIN (ref))
933 df_chain_unlink (ref);
934 ref_rec++;
935 }
936 }
937
938
939 /* Delete all refs in the ref chain. */
940
941 static void
942 df_ref_chain_delete (struct df_ref **ref_rec)
943 {
944 struct df_ref **start = ref_rec;
945 while (*ref_rec)
946 {
947 df_reg_chain_unlink (*ref_rec);
948 ref_rec++;
949 }
950
951 /* If the list is empty, it has a special shared element that is not
952 to be deleted. */
953 if (*start)
954 free (start);
955 }
956
957
958 /* Delete the hardreg chain. */
959
960 static void
961 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
962 {
963 struct df_scan_problem_data *problem_data;
964
965 if (!hardregs)
966 return;
967
968 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
969
970 while (*hardregs)
971 {
972 pool_free (problem_data->mw_reg_pool, *hardregs);
973 hardregs++;
974 }
975 }
976
977
978 /* Delete all of the refs information from INSN. BB is used to mark
979 the block dirty and must be passed in, except when this is called
980 from df_process_deferred_rescans. */
981
982 void
983 df_insn_delete (basic_block bb, unsigned int uid)
984 {
985 struct df_insn_info *insn_info = NULL;
986 if (!df)
987 return;
988
989 df_grow_bb_info (df_scan);
990 df_grow_reg_info ();
991
992 /* The block must be marked as dirty now, rather than later as in
993 df_insn_rescan and df_notes_rescan because it may not be there at
994 rescanning time and the mark would blow up. */
995 if (bb)
996 df_set_bb_dirty (bb);
997
998 insn_info = DF_INSN_UID_SAFE_GET (uid);
999
1000 /* The client has deferred rescanning. */
1001 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1002 {
1003 if (insn_info)
1004 {
1005 bitmap_clear_bit (df->insns_to_rescan, uid);
1006 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1007 bitmap_set_bit (df->insns_to_delete, uid);
1008 }
1009 if (dump_file)
1010 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1011 return;
1012 }
1013
1014 if (dump_file)
1015 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1016
1017 bitmap_clear_bit (df->insns_to_delete, uid);
1018 bitmap_clear_bit (df->insns_to_rescan, uid);
1019 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1020 if (insn_info)
1021 {
1022 struct df_scan_problem_data *problem_data
1023 = (struct df_scan_problem_data *) df_scan->problem_data;
1024
1025 /* In general, notes do not have the insn_info fields
1026 initialized. However, combine deletes insns by changing them
1027 to notes. How clever. So we cannot just check if it is a
1028 valid insn before short circuiting this code, we need to see
1029 if we actually initialized it. */
1030 if (insn_info->defs)
1031 {
1032 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1033
1034 if (df_chain)
1035 {
1036 df_ref_chain_delete_du_chain (insn_info->defs);
1037 df_ref_chain_delete_du_chain (insn_info->uses);
1038 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1039 }
1040
1041 df_ref_chain_delete (insn_info->defs);
1042 df_ref_chain_delete (insn_info->uses);
1043 df_ref_chain_delete (insn_info->eq_uses);
1044 }
1045 pool_free (problem_data->insn_pool, insn_info);
1046 DF_INSN_UID_SET (uid, NULL);
1047 }
1048 }
1049
1050
1051 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
1052
1053 static void
1054 df_free_collection_rec (struct df_collection_rec *collection_rec)
1055 {
1056 struct df_scan_problem_data *problem_data
1057 = (struct df_scan_problem_data *) df_scan->problem_data;
1058 struct df_ref **ref;
1059 struct df_mw_hardreg **mw;
1060
1061 if (collection_rec->def_vec)
1062 for (ref = collection_rec->def_vec; *ref; ref++)
1063 pool_free (problem_data->ref_pool, *ref);
1064 if (collection_rec->use_vec)
1065 for (ref = collection_rec->use_vec; *ref; ref++)
1066 pool_free (problem_data->ref_pool, *ref);
1067 if (collection_rec->eq_use_vec)
1068 for (ref = collection_rec->eq_use_vec; *ref; ref++)
1069 pool_free (problem_data->ref_pool, *ref);
1070 if (collection_rec->mw_vec)
1071 for (mw = collection_rec->mw_vec; *mw; mw++)
1072 pool_free (problem_data->mw_reg_pool, *mw);
1073 }
1074
1075
1076 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1077
1078 bool
1079 df_insn_rescan (rtx insn)
1080 {
1081 unsigned int uid = INSN_UID (insn);
1082 struct df_insn_info *insn_info = NULL;
1083 basic_block bb = BLOCK_FOR_INSN (insn);
1084 struct df_collection_rec collection_rec;
1085 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
1086 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
1087 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
1088 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
1089
1090 if ((!df) || (!INSN_P (insn)))
1091 return false;
1092
1093 if (!bb)
1094 {
1095 if (dump_file)
1096 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1097 return false;
1098 }
1099
1100 /* The client has disabled rescanning and plans to do it itself. */
1101 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1102 return false;
1103
1104 df_grow_bb_info (df_scan);
1105 df_grow_reg_info ();
1106
1107 insn_info = DF_INSN_UID_SAFE_GET (uid);
1108
1109 /* The client has deferred rescanning. */
1110 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1111 {
1112 if (!insn_info)
1113 {
1114 insn_info = df_insn_create_insn_record (insn);
1115 insn_info->defs = df_null_ref_rec;
1116 insn_info->uses = df_null_ref_rec;
1117 insn_info->eq_uses = df_null_ref_rec;
1118 insn_info->mw_hardregs = df_null_mw_rec;
1119 }
1120 if (dump_file)
1121 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
1122
1123 bitmap_clear_bit (df->insns_to_delete, uid);
1124 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1125 bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
1126 return false;
1127 }
1128
1129 bitmap_clear_bit (df->insns_to_delete, uid);
1130 bitmap_clear_bit (df->insns_to_rescan, uid);
1131 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1132 if (insn_info)
1133 {
1134 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1135 /* If there's no change, return false. */
1136 if (the_same)
1137 {
1138 df_free_collection_rec (&collection_rec);
1139 if (dump_file)
1140 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1141 return false;
1142 }
1143 if (dump_file)
1144 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1145
1146 /* There's change - we need to delete the existing info. */
1147 df_insn_delete (NULL, uid);
1148 df_insn_create_insn_record (insn);
1149 }
1150 else
1151 {
1152 df_insn_create_insn_record (insn);
1153 df_insn_refs_collect (&collection_rec, bb, insn);
1154 if (dump_file)
1155 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1156 }
1157
1158 df_refs_add_to_chains (&collection_rec, bb, insn);
1159 df_set_bb_dirty (bb);
1160 return true;
1161 }
1162
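/* Sketch of a caller's view (INSN is hypothetical): after a pass
   edits the pattern of INSN in place, a single call keeps the scanned
   refs coherent, and the return value says whether anything changed.

     changed = df_insn_rescan (insn);

   When DF_DEFER_INSN_RESCAN is set, the call just queues INSN in
   insns_to_rescan and returns false.  */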
1163
1164 /* Rescan all of the insns in the function. Note that the artificial
1165 uses and defs are not touched. This function will destroy def-use
1166 or use-def chains. */
1167
1168 void
1169 df_insn_rescan_all (void)
1170 {
1171 bool no_insn_rescan = false;
1172 bool defer_insn_rescan = false;
1173 basic_block bb;
1174 bitmap_iterator bi;
1175 unsigned int uid;
1176 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1177
1178 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1179 {
1180 df_clear_flags (DF_NO_INSN_RESCAN);
1181 no_insn_rescan = true;
1182 }
1183
1184 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1185 {
1186 df_clear_flags (DF_DEFER_INSN_RESCAN);
1187 defer_insn_rescan = true;
1188 }
1189
1190 bitmap_copy (tmp, df->insns_to_delete);
1191 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1192 {
1193 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1194 if (insn_info)
1195 df_insn_delete (NULL, uid);
1196 }
1197
1198 BITMAP_FREE (tmp);
1199 bitmap_clear (df->insns_to_delete);
1200 bitmap_clear (df->insns_to_rescan);
1201 bitmap_clear (df->insns_to_notes_rescan);
1202
1203 FOR_EACH_BB (bb)
1204 {
1205 rtx insn;
1206 FOR_BB_INSNS (bb, insn)
1207 {
1208 df_insn_rescan (insn);
1209 }
1210 }
1211
1212 if (no_insn_rescan)
1213 df_set_flags (DF_NO_INSN_RESCAN);
1214 if (defer_insn_rescan)
1215 df_set_flags (DF_DEFER_INSN_RESCAN);
1216 }
1217
1218
1219 /* Process all of the deferred rescans or deletions. */
1220
1221 void
1222 df_process_deferred_rescans (void)
1223 {
1224 bool no_insn_rescan = false;
1225 bool defer_insn_rescan = false;
1226 bitmap_iterator bi;
1227 unsigned int uid;
1228 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1229
1230 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1231 {
1232 df_clear_flags (DF_NO_INSN_RESCAN);
1233 no_insn_rescan = true;
1234 }
1235
1236 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1237 {
1238 df_clear_flags (DF_DEFER_INSN_RESCAN);
1239 defer_insn_rescan = true;
1240 }
1241
1242 if (dump_file)
1243 fprintf (dump_file, "starting the processing of deferred insns\n");
1244
1245 bitmap_copy (tmp, df->insns_to_delete);
1246 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1247 {
1248 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1249 if (insn_info)
1250 df_insn_delete (NULL, uid);
1251 }
1252
1253 bitmap_copy (tmp, df->insns_to_rescan);
1254 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1255 {
1256 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1257 if (insn_info)
1258 df_insn_rescan (insn_info->insn);
1259 }
1260
1261 bitmap_copy (tmp, df->insns_to_notes_rescan);
1262 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1263 {
1264 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1265 if (insn_info)
1266 df_notes_rescan (insn_info->insn);
1267 }
1268
1269 if (dump_file)
1270 fprintf (dump_file, "ending the processing of deferred insns\n");
1271
1272 BITMAP_FREE (tmp);
1273 bitmap_clear (df->insns_to_delete);
1274 bitmap_clear (df->insns_to_rescan);
1275 bitmap_clear (df->insns_to_notes_rescan);
1276
1277 if (no_insn_rescan)
1278 df_set_flags (DF_NO_INSN_RESCAN);
1279 if (defer_insn_rescan)
1280 df_set_flags (DF_DEFER_INSN_RESCAN);
1281
1282 /* If someone changed regs_ever_live during this pass, fix up the
1283 entry and exit blocks. */
1284 if (df->redo_entry_and_exit)
1285 {
1286 df_update_entry_exit_and_calls ();
1287 df->redo_entry_and_exit = false;
1288 }
1289 }
1290
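/* The deferred protocol, as a sketch (flag manipulation assumed to go
   through df_set_flags/df_clear_flags as elsewhere in df):

     df_set_flags (DF_DEFER_INSN_RESCAN);
     ... delete and modify many insns ...
     df_process_deferred_rescans ();
     df_clear_flags (DF_DEFER_INSN_RESCAN);

   Deletions queued in insns_to_delete are processed before the
   rescans and the notes rescans.  */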
1291
1292 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1293 the uses if INCLUDE_USES. Include the eq_uses if
1294 INCLUDE_EQ_USES. */
1295
1296 static unsigned int
1297 df_count_refs (bool include_defs, bool include_uses,
1298 bool include_eq_uses)
1299 {
1300 unsigned int regno;
1301 int size = 0;
1302 unsigned int m = df->regs_inited;
1303
1304 for (regno = 0; regno < m; regno++)
1305 {
1306 if (include_defs)
1307 size += DF_REG_DEF_COUNT (regno);
1308 if (include_uses)
1309 size += DF_REG_USE_COUNT (regno);
1310 if (include_eq_uses)
1311 size += DF_REG_EQ_USE_COUNT (regno);
1312 }
1313 return size;
1314 }
1315
1316
1317 /* Build the ref table for either the uses or defs from the reg-use
1318 or reg-def chains. This version processes the refs in reg order
1319 which is likely to be best if processing the whole function. */
1320
1321 static void
1322 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1323 bool include_defs,
1324 bool include_uses,
1325 bool include_eq_uses)
1326 {
1327 unsigned int m = df->regs_inited;
1328 unsigned int regno;
1329 unsigned int offset = 0;
1330 unsigned int start;
1331
1332 if (df->changeable_flags & DF_NO_HARD_REGS)
1333 {
1334 start = FIRST_PSEUDO_REGISTER;
1335 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1336 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1337 }
1338 else
1339 start = 0;
1340
1341 ref_info->total_size
1342 = df_count_refs (include_defs, include_uses, include_eq_uses);
1343
1344 df_check_and_grow_ref_info (ref_info, 1);
1345
1346 for (regno = start; regno < m; regno++)
1347 {
1348 int count = 0;
1349 ref_info->begin[regno] = offset;
1350 if (include_defs)
1351 {
1352 struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
1353 while (ref)
1354 {
1355 ref_info->refs[offset] = ref;
1356 DF_REF_ID (ref) = offset++;
1357 count++;
1358 ref = DF_REF_NEXT_REG (ref);
1359 gcc_assert (offset < ref_info->refs_size);
1360 }
1361 }
1362 if (include_uses)
1363 {
1364 struct df_ref *ref = DF_REG_USE_CHAIN (regno);
1365 while (ref)
1366 {
1367 ref_info->refs[offset] = ref;
1368 DF_REF_ID (ref) = offset++;
1369 count++;
1370 ref = DF_REF_NEXT_REG (ref);
1371 gcc_assert (offset < ref_info->refs_size);
1372 }
1373 }
1374 if (include_eq_uses)
1375 {
1376 struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
1377 while (ref)
1378 {
1379 ref_info->refs[offset] = ref;
1380 DF_REF_ID (ref) = offset++;
1381 count++;
1382 ref = DF_REF_NEXT_REG (ref);
1383 gcc_assert (offset < ref_info->refs_size);
1384 }
1385 }
1386 ref_info->count[regno] = count;
1387 }
1388
1389 /* The bitmap size is not decremented when refs are deleted. So
1390 reset it now that we have squished out all of the empty
1391 slots. */
1392 ref_info->table_size = offset;
1393 }
1394
1395
1396 /* Build the ref table for either the uses or defs from the reg-use
1397 or reg-def chains. This version processes the refs in insn order
1398 which is likely to be best if processing some segment of the
1399 function. */
1400
1401 static void
1402 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1403 bool include_defs,
1404 bool include_uses,
1405 bool include_eq_uses)
1406 {
1407 bitmap_iterator bi;
1408 unsigned int bb_index;
1409 unsigned int m = df->regs_inited;
1410 unsigned int offset = 0;
1411 unsigned int r;
1412 unsigned int start
1413 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1414
1415 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1416 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1417
1418 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1419 df_check_and_grow_ref_info (ref_info, 1);
1420
1421 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1422 {
1423 basic_block bb = BASIC_BLOCK (bb_index);
1424 rtx insn;
1425 struct df_ref **ref_rec;
1426
1427 if (include_defs)
1428 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1429 {
1430 unsigned int regno = DF_REF_REGNO (*ref_rec);
1431 ref_info->count[regno]++;
1432 }
1433 if (include_uses)
1434 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1435 {
1436 unsigned int regno = DF_REF_REGNO (*ref_rec);
1437 ref_info->count[regno]++;
1438 }
1439
1440 FOR_BB_INSNS (bb, insn)
1441 {
1442 if (INSN_P (insn))
1443 {
1444 unsigned int uid = INSN_UID (insn);
1445
1446 if (include_defs)
1447 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1448 {
1449 unsigned int regno = DF_REF_REGNO (*ref_rec);
1450 ref_info->count[regno]++;
1451 }
1452 if (include_uses)
1453 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1454 {
1455 unsigned int regno = DF_REF_REGNO (*ref_rec);
1456 ref_info->count[regno]++;
1457 }
1458 if (include_eq_uses)
1459 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1460 {
1461 unsigned int regno = DF_REF_REGNO (*ref_rec);
1462 ref_info->count[regno]++;
1463 }
1464 }
1465 }
1466 }
1467
1468 for (r = start; r < m; r++)
1469 {
1470 ref_info->begin[r] = offset;
1471 offset += ref_info->count[r];
1472 ref_info->count[r] = 0;
1473 }
1474
1475 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1476 {
1477 basic_block bb = BASIC_BLOCK (bb_index);
1478 rtx insn;
1479 struct df_ref **ref_rec;
1480
1481 if (include_defs)
1482 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1483 {
1484 struct df_ref *ref = *ref_rec;
1485 unsigned int regno = DF_REF_REGNO (ref);
1486 if (regno >= start)
1487 {
1488 unsigned int id
1489 = ref_info->begin[regno] + ref_info->count[regno]++;
1490 DF_REF_ID (ref) = id;
1491 ref_info->refs[id] = ref;
1492 }
1493 }
1494 if (include_uses)
1495 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1496 {
1497 struct df_ref *ref = *ref_rec;
1498 unsigned int regno = DF_REF_REGNO (ref);
1499 if (regno >= start)
1500 {
1501 unsigned int id
1502 = ref_info->begin[regno] + ref_info->count[regno]++;
1503 DF_REF_ID (ref) = id;
1504 ref_info->refs[id] = ref;
1505 }
1506 }
1507
1508 FOR_BB_INSNS (bb, insn)
1509 {
1510 if (INSN_P (insn))
1511 {
1512 unsigned int uid = INSN_UID (insn);
1513
1514 if (include_defs)
1515 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1516 {
1517 struct df_ref *ref = *ref_rec;
1518 unsigned int regno = DF_REF_REGNO (ref);
1519 if (regno >= start)
1520 {
1521 unsigned int id
1522 = ref_info->begin[regno] + ref_info->count[regno]++;
1523 DF_REF_ID (ref) = id;
1524 ref_info->refs[id] = ref;
1525 }
1526 }
1527 if (include_uses)
1528 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1529 {
1530 struct df_ref *ref = *ref_rec;
1531 unsigned int regno = DF_REF_REGNO (ref);
1532 if (regno >= start)
1533 {
1534 unsigned int id
1535 = ref_info->begin[regno] + ref_info->count[regno]++;
1536 DF_REF_ID (ref) = id;
1537 ref_info->refs[id] = ref;
1538 }
1539 }
1540 if (include_eq_uses)
1541 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1542 {
1543 struct df_ref *ref = *ref_rec;
1544 unsigned int regno = DF_REF_REGNO (ref);
1545 if (regno >= start)
1546 {
1547 unsigned int id
1548 = ref_info->begin[regno] + ref_info->count[regno]++;
1549 DF_REF_ID (ref) = id;
1550 ref_info->refs[id] = ref;
1551 }
1552 }
1553 }
1554 }
1555 }
1556
1557 /* The bitmap size is not decremented when refs are deleted. So
1558 reset it now that we have squished out all of the empty
1559 slots. */
1560
1561 ref_info->table_size = offset;
1562 }
1563
1564 /* Build the ref table for either the uses or defs from the reg-use
1565 or reg-def chains. */
1566
1567 static void
1568 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1569 bool include_defs,
1570 bool include_uses,
1571 bool include_eq_uses)
1572 {
1573 if (df->analyze_subset)
1574 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1575 include_uses, include_eq_uses);
1576 else
1577 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1578 include_uses, include_eq_uses);
1579 }
1580
1581
1582 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1583 static unsigned int
1584 df_add_refs_to_table (unsigned int offset,
1585 struct df_ref_info *ref_info,
1586 struct df_ref **ref_vec)
1587 {
1588 while (*ref_vec)
1589 {
1590 struct df_ref *ref = *ref_vec;
1591 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1592 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1593 {
1594 ref_info->refs[offset] = ref;
1595 DF_REF_ID (*ref_vec) = offset++;
1596 }
1597 ref_vec++;
1598 }
1599 return offset;
1600 }
1601
1602
1603 /* Count the number of refs in all of the insns of BB. Include the
1604 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1605 eq_uses if INCLUDE_EQ_USES. */
1606
1607 static unsigned int
1608 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1609 struct df_ref_info *ref_info,
1610 bool include_defs, bool include_uses,
1611 bool include_eq_uses)
1612 {
1613 rtx insn;
1614
1615 if (include_defs)
1616 offset = df_add_refs_to_table (offset, ref_info,
1617 df_get_artificial_defs (bb->index));
1618 if (include_uses)
1619 offset = df_add_refs_to_table (offset, ref_info,
1620 df_get_artificial_uses (bb->index));
1621
1622 FOR_BB_INSNS (bb, insn)
1623 if (INSN_P (insn))
1624 {
1625 unsigned int uid = INSN_UID (insn);
1626 if (include_defs)
1627 offset = df_add_refs_to_table (offset, ref_info,
1628 DF_INSN_UID_DEFS (uid));
1629 if (include_uses)
1630 offset = df_add_refs_to_table (offset, ref_info,
1631 DF_INSN_UID_USES (uid));
1632 if (include_eq_uses)
1633 offset = df_add_refs_to_table (offset, ref_info,
1634 DF_INSN_UID_EQ_USES (uid));
1635 }
1636 return offset;
1637 }
1638
1639
1640 /* Organize the refs by insn into the table in REF_INFO. If
1641 blocks_to_analyze is defined, use that set, otherwise the entire
1642 program. Include the defs if INCLUDE_DEFS. Include the uses if
1643 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1644
1645 static void
1646 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1647 bool include_defs, bool include_uses,
1648 bool include_eq_uses)
1649 {
1650 basic_block bb;
1651 unsigned int offset = 0;
1652
1653 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1654 df_check_and_grow_ref_info (ref_info, 1);
1655 if (df->blocks_to_analyze)
1656 {
1657 bitmap_iterator bi;
1658 unsigned int index;
1659
1660 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1661 {
1662 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1663 include_defs, include_uses,
1664 include_eq_uses);
1665 }
1666
1667 ref_info->table_size = offset;
1668 }
1669 else
1670 {
1671 FOR_ALL_BB (bb)
1672 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1673 include_defs, include_uses,
1674 include_eq_uses);
1675 ref_info->table_size = offset;
1676 }
1677 }
1678
1679
1680 /* If the use refs in DF are not organized, reorganize them. */
1681
1682 void
1683 df_maybe_reorganize_use_refs (enum df_ref_order order)
1684 {
1685 if (order == df->use_info.ref_order)
1686 return;
1687
1688 switch (order)
1689 {
1690 case DF_REF_ORDER_BY_REG:
1691 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1692 break;
1693
1694 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1695 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1696 break;
1697
1698 case DF_REF_ORDER_BY_INSN:
1699 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1700 break;
1701
1702 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1703 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1704 break;
1705
1706 case DF_REF_ORDER_NO_TABLE:
1707 free (df->use_info.refs);
1708 df->use_info.refs = NULL;
1709 df->use_info.refs_size = 0;
1710 break;
1711
1712 case DF_REF_ORDER_UNORDERED:
1713 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1714 gcc_unreachable ();
1715 break;
1716 }
1717
1718 df->use_info.ref_order = order;
1719 }
1720
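/* Note that the per-register chains are maintained independently of
   the ref table; a walk over every use of REGNO, using only macros
   that already appear in this file, is simply:

     struct df_ref *use;
     for (use = DF_REG_USE_CHAIN (regno); use; use = DF_REF_NEXT_REG (use))
       ...;
*/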
1721
1722 /* If the def refs in DF are not organized, reorganize them. */
1723
1724 void
1725 df_maybe_reorganize_def_refs (enum df_ref_order order)
1726 {
1727 if (order == df->def_info.ref_order)
1728 return;
1729
1730 switch (order)
1731 {
1732 case DF_REF_ORDER_BY_REG:
1733 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1734 break;
1735
1736 case DF_REF_ORDER_BY_INSN:
1737 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1738 break;
1739
1740 case DF_REF_ORDER_NO_TABLE:
1741 free (df->def_info.refs);
1742 df->def_info.refs = NULL;
1743 df->def_info.refs_size = 0;
1744 break;
1745
1746 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1747 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1748 case DF_REF_ORDER_UNORDERED:
1749 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1750 gcc_unreachable ();
1751 break;
1752 }
1753
1754 df->def_info.ref_order = order;
1755 }
1756
1757
1758 /* Change the BB of all refs in the ref chain to NEW_BB.
1759 Assumes that all refs in the chain have the same BB.
1760 If changed, return the original bb the chain belonged to
1761 (or OLD_BB if the chain was empty).
1762 If no change, return NEW_BB.
1763 If something's wrong, it will return NULL. */
1764
1765 static basic_block
1766 df_ref_chain_change_bb (struct df_ref **ref_rec,
1767 basic_block old_bb,
1768 basic_block new_bb)
1769 {
1770 while (*ref_rec)
1771 {
1772 struct df_ref *ref = *ref_rec;
1773
1774 if (DF_REF_BB (ref) == new_bb)
1775 return new_bb;
1776 else
1777 {
1778 gcc_assert (old_bb == NULL || DF_REF_BB (ref) == old_bb);
1779 old_bb = DF_REF_BB (ref);
1780 DF_REF_BB (ref) = new_bb;
1781 }
1782 ref_rec++;
1783 }
1784
1785 return old_bb;
1786 }
1787
1788
1789 /* Change all of the basic block references in INSN to use the insn's
1790 current basic block. This function is called from routines that move
1791 instructions from one block to another. */
1792
1793 void
1794 df_insn_change_bb (rtx insn)
1795 {
1796 basic_block new_bb = BLOCK_FOR_INSN (insn);
1797 basic_block old_bb = NULL;
1798 struct df_insn_info *insn_info;
1799 unsigned int uid = INSN_UID (insn);
1800
1801 if (!df)
1802 return;
1803
1804 if (dump_file)
1805 fprintf (dump_file, "changing bb of uid %d\n", uid);
1806
1807 insn_info = DF_INSN_UID_SAFE_GET (uid);
1808 if (insn_info == NULL)
1809 {
1810 if (dump_file)
1811 fprintf (dump_file, " unscanned insn\n");
1812 df_insn_rescan (insn);
1813 return;
1814 }
1815
1816 if (!INSN_P (insn))
1817 return;
1818
1819 old_bb = df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
1820 if (old_bb == new_bb)
1821 return;
1822
1823 old_bb = df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
1824 if (old_bb == new_bb)
1825 return;
1826
1827 old_bb = df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
1828 if (old_bb == new_bb)
1829 return;
1830
1831 df_set_bb_dirty (new_bb);
1832 if (old_bb)
1833 {
1834 if (dump_file)
1835 fprintf (dump_file, " from %d to %d\n",
1836 old_bb->index, new_bb->index);
1837 df_set_bb_dirty (old_bb);
1838 }
1839 else
1840 if (dump_file)
1841 fprintf (dump_file, " to %d\n", new_bb->index);
1842 }
1843
1844
1845 /* Helper function for df_ref_change_reg_with_loc. */
1846
1847 static void
1848 df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
1849 int new_regno, rtx loc)
1850 {
1851 struct df_ref *the_ref = old->reg_chain;
1852
1853 while (the_ref)
1854 {
1855 if (DF_REF_LOC(the_ref) && (*DF_REF_LOC(the_ref) == loc))
1856 {
1857 struct df_ref *next_ref = the_ref->next_reg;
1858 struct df_ref *prev_ref = the_ref->prev_reg;
1859 struct df_ref **ref_vec, **ref_vec_t;
1860 unsigned int count = 0;
1861
1862 DF_REF_REGNO (the_ref) = new_regno;
1863 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
1864
1865 /* Pull the_ref out of the old regno chain. */
1866 if (prev_ref)
1867 prev_ref->next_reg = next_ref;
1868 else
1869 old->reg_chain = next_ref;
1870 if (next_ref)
1871 next_ref->prev_reg = prev_ref;
1872 old->n_refs--;
1873
1874 /* Put the ref into the new regno chain. */
1875 the_ref->prev_reg = NULL;
1876 the_ref->next_reg = new->reg_chain;
1877 if (new->reg_chain)
1878 new->reg_chain->prev_reg = the_ref;
1879 new->reg_chain = the_ref;
1880 new->n_refs++;
1881 df_set_bb_dirty (DF_REF_BB (the_ref));
1882
1883 /* Need to resort the record that the ref was in because the
1884 regno is a sorting key. First, find the right record. */
1885 if (DF_REF_IS_ARTIFICIAL (the_ref))
1886 {
1887 unsigned int bb_index = DF_REF_BB (the_ref)->index;
1888 if (DF_REF_REG_DEF_P (the_ref))
1889 ref_vec = df_get_artificial_defs (bb_index);
1890 else
1891 ref_vec = df_get_artificial_uses (bb_index);
1892 }
1893 else
1894 {
1895 struct df_insn_info *insn_info
1896 = DF_INSN_GET (DF_REF_INSN (the_ref));
1897 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
1898 ref_vec = insn_info->eq_uses;
1899 else
1900 ref_vec = insn_info->uses;
1901 if (dump_file)
1902 fprintf (dump_file, "changing reg in insn %d\n",
1903 INSN_UID (DF_REF_INSN (the_ref)));
1904 }
1905 ref_vec_t = ref_vec;
1906
1907 /* Find the length. */
1908 while (*ref_vec_t)
1909 {
1910 count++;
1911 ref_vec_t++;
1912 }
1913 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
1914
1915 the_ref = next_ref;
1916 }
1917 else
1918 the_ref = the_ref->next_reg;
1919 }
1920 }
1921
1922
1923 /* Change the regno of all refs that contained LOC from OLD_REGNO to
1924 NEW_REGNO. Refs that do not match LOC are not changed. This call
1925 is to support the SET_REGNO macro. */
1926
1927 void
1928 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
1929 {
1930 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
1931 return;
1932
1933 df_grow_reg_info ();
1934
1935 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
1936 DF_REG_DEF_GET (new_regno), new_regno, loc);
1937 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
1938 DF_REG_USE_GET (new_regno), new_regno, loc);
1939 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
1940 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
1941 }
1942
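/* Illustrative use (REG is a hypothetical reg rtx): when a pass
   rewrites the register number stored in a reg rtx in place, df must
   be told which refs to migrate; only refs whose recorded location is
   exactly REG are moved to the new register's chains.

     int old_regno = REGNO (reg);
     ... change the regno stored in REG to NEW_REGNO ...
     df_ref_change_reg_with_loc (old_regno, new_regno, reg);

   This is the operation the SET_REGNO macro is built on.  */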
1943
1944 /* Delete the mw_hardregs that point into the eq_notes. */
1945
1946 static unsigned int
1947 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1948 {
1949 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
1950 unsigned int deleted = 0;
1951 unsigned int count = 0;
1952 struct df_scan_problem_data *problem_data
1953 = (struct df_scan_problem_data *) df_scan->problem_data;
1954
1955 if (!*mw_vec)
1956 return 0;
1957
1958 while (*mw_vec)
1959 {
1960 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
1961 {
1962 struct df_mw_hardreg **temp_vec = mw_vec;
1963
1964 pool_free (problem_data->mw_reg_pool, *mw_vec);
1966 /* Shove the remaining ones down one to fill the gap. While
1967 this looks O(n^2), it is highly unusual to have any mw regs
1968 in eq_notes, and the chances of more than one are almost
1969 nonexistent. */
1970 while (*temp_vec)
1971 {
1972 *temp_vec = *(temp_vec + 1);
1973 temp_vec++;
1974 }
1975 deleted++;
1976 }
1977 else
1978 {
1979 mw_vec++;
1980 count++;
1981 }
1982 }
1983
1984 if (count == 0)
1985 {
1986 free (insn_info->mw_hardregs);
1987 insn_info->mw_hardregs = df_null_mw_rec;
1988 return 0;
1989 }
1990 return deleted;
1991 }
1992
1993
1994 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1995
1996 void
1997 df_notes_rescan (rtx insn)
1998 {
1999 struct df_insn_info *insn_info;
2000 unsigned int uid = INSN_UID (insn);
2001
2002 if (!df)
2003 return;
2004
2005 /* The client has disabled rescanning and plans to do it itself. */
2006 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2007 return;
2008
2009 df_grow_bb_info (df_scan);
2010 df_grow_reg_info ();
2011
2012 insn_info = DF_INSN_UID_SAFE_GET (uid);
2013
2014 /* The client has deferred rescanning. */
2015 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2016 {
2017 if (!insn_info)
2018 {
2019 insn_info = df_insn_create_insn_record (insn);
2020 insn_info->defs = df_null_ref_rec;
2021 insn_info->uses = df_null_ref_rec;
2022 insn_info->eq_uses = df_null_ref_rec;
2023 insn_info->mw_hardregs = df_null_mw_rec;
2024 }
2025
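/* The insn still exists and is being rescanned, so cancel any pending deletion. */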
2026 bitmap_clear_bit (df->insns_to_delete, uid);
2027 /* If the insn is already set to be rescanned, it does not also
2028 need its notes rescanned. */
2029 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2030 bitmap_set_bit (df->insns_to_notes_rescan, uid);
2031 return;
2032 }
2033
2034 bitmap_clear_bit (df->insns_to_delete, uid);
2035 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2036
2037 if (insn_info)
2038 {
2039 basic_block bb = BLOCK_FOR_INSN (insn);
2040 rtx note;
2041 struct df_collection_rec collection_rec;
2042 unsigned int num_deleted;
2043
2044 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
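/* Scratch vectors allocated on the stack; 1000 slots is assumed to be enough for the refs found in any one insn's notes. */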
2045 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
2046 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
2047
2048 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2049 df_ref_chain_delete (insn_info->eq_uses);
2050 insn_info->eq_uses = NULL;
2051
2052 /* Process REG_EQUIV/REG_EQUAL notes */
2053 for (note = REG_NOTES (insn); note;
2054 note = XEXP (note, 1))
2055 {
2056 switch (REG_NOTE_KIND (note))
2057 {
2058 case REG_EQUIV:
2059 case REG_EQUAL:
2060 df_uses_record (&collection_rec,
2061 &XEXP (note, 0), DF_REF_REG_USE,
2062 bb, insn, DF_REF_IN_NOTE);
2063 default:
2064 break;
2065 }
2066 }
2067
2068 /* Find some place to put any new mw_hardregs. */
2069 df_canonize_collection_rec (&collection_rec);
2070 if (collection_rec.next_mw)
2071 {
2072 unsigned int count = 0;
2073 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2074 while (*mw_rec)
2075 {
2076 count++;
2077 mw_rec++;
2078 }
2079
2080 if (count)
2081 {
2082 /* Append to the end of the existing record, expanding it
2083 first unless the slots freed above leave enough room. */
2084 if (collection_rec.next_mw > num_deleted)
2085 {
2086 insn_info->mw_hardregs =
2087 xrealloc (insn_info->mw_hardregs,
2088 (count + 1 + collection_rec.next_mw)
2089 * sizeof (struct df_mw_hardreg *));
2090 }
2091 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
2092 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2093 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
2094 sizeof (struct df_mw_hardreg *), df_mw_compare);
2095 }
2096 else
2097 {
2098 /* No vector there. */
2099 insn_info->mw_hardregs
2100 = XNEWVEC (struct df_mw_hardreg*,
2101 count + 1 + collection_rec.next_mw);
2102 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
2103 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2104 }
2105 }
2106 /* Get rid of the mw_rec so that df_refs_add_to_chains will
2107 ignore it. */
2108 collection_rec.mw_vec = NULL;
2109 collection_rec.next_mw = 0;
2110 df_refs_add_to_chains (&collection_rec, bb, insn);
2111 }
2112 else
2113 df_insn_rescan (insn);
2114
2115 }
2116
2117 \f
2118 /*----------------------------------------------------------------------------
2119 Hard core instruction scanning code. No external interfaces here,
2120 just a lot of routines that look inside insns.
2121 ----------------------------------------------------------------------------*/
2122
2123
2124 /* Return true if the contents of two df_ref's are identical.
2125 It ignores the DF_REF_REG_MARKER and DF_REF_MW_HARDREG flags. */
2126
2127 static bool
2128 df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
2129 {
2130 if (!ref2)
2131 return false;
2132 return (ref1 == ref2) ||
2133 (DF_REF_REG (ref1) == DF_REF_REG (ref2)
2134 && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
2135 && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
2136 && DF_REF_INSN (ref1) == DF_REF_INSN (ref2)
2137 && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
2138 && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2139 == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2140 && DF_REF_BB (ref1) == DF_REF_BB (ref2));
2141 }
2142
2143
2144 /* Compare REF1 and REF2 for sorting. This is only called from places
2145 where all of the refs are of the same type, in the same insn, and
2146 have the same bb. So these fields are not checked. */
2147
2148 static int
2149 df_ref_compare (const void *r1, const void *r2)
2150 {
2151 const struct df_ref *const ref1 = *(const struct df_ref *const*)r1;
2152 const struct df_ref *const ref2 = *(const struct df_ref *const*)r2;
2153
2154 if (ref1 == ref2)
2155 return 0;
2156
2157 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2158 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2159
2160 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2161 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2162
2163 if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
2164 || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
2165 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2166
2167 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2168 {
2169 /* If two refs are identical except that one of them is from
2170 a mw and one is not, we need to have the one with the mw
2171 first. */
2172 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2173 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2174 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2175 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2176 return -1;
2177 else
2178 return 1;
2179 }
2180 return 0;
2181 }
2182
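/* Swap the refs at positions I and J in REF_VEC. */
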
2183 static void
2184 df_swap_refs (struct df_ref **ref_vec, int i, int j)
2185 {
2186 struct df_ref *tmp = ref_vec[i];
2187 ref_vec[i] = ref_vec[j];
2188 ref_vec[j] = tmp;
2189 }
2190
2191 /* Sort and compress a set of refs. */
2192
2193 static unsigned int
2194 df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
2195 {
2196 struct df_scan_problem_data *problem_data
2197 = (struct df_scan_problem_data *) df_scan->problem_data;
2198 unsigned int i;
2199 unsigned int dist = 0;
2200
2201 ref_vec[count] = NULL;
2202 /* If there are 1 or 0 elements, there is nothing to do. */
2203 if (count < 2)
2204 return count;
2205 else if (count == 2)
2206 {
2207 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
2208 df_swap_refs (ref_vec, 0, 1);
2209 }
2210 else
2211 {
2212 for (i = 0; i < count - 1; i++)
2213 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
2214 break;
2215 /* If the array is already strictly ordered,
2216 which is the most common case for a large COUNT
2217 (and which happens for CALL INSNs),
2218 there is no need to sort and filter out duplicates.
2219 Simply return the count.
2220 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2221 of DF_REF_COMPARE. */
2222 if (i == count - 1)
2223 return count;
2224 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
2225 }
2226
2227 for (i = 0; i < count - dist; i++)
2228 {
2229 /* Find the next ref that is not equal to the current ref. */
2230 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
2231 {
2232 pool_free (problem_data->ref_pool, ref_vec[i + dist + 1]);
2233 dist++;
2234 }
2235 /* Copy it down to the next position. */
2236 if (dist)
2237 ref_vec[i+1] = ref_vec[i + dist + 1];
2238 }
2239
2240 count -= dist;
2241 ref_vec[count] = NULL;
2242 return count;
2243 }
2244
2245
2246 /* Return true if the contents of two df_mw_hardreg's are
2247 identical. */
2248
2249 static bool
2250 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2251 {
2252 if (!mw2)
2253 return false;
2254 return (mw1 == mw2) ||
2255 (mw1->mw_reg == mw2->mw_reg
2256 && mw1->type == mw2->type
2257 && mw1->flags == mw2->flags
2258 && mw1->start_regno == mw2->start_regno
2259 && mw1->end_regno == mw2->end_regno);
2260 }
2261
2262
2263 /* Compare MW1 and MW2 for sorting. */
2264
2265 static int
2266 df_mw_compare (const void *m1, const void *m2)
2267 {
2268 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2269 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2270
2271 if (mw1 == mw2)
2272 return 0;
2273
2274 if (mw1->type != mw2->type)
2275 return mw1->type - mw2->type;
2276
2277 if (mw1->flags != mw2->flags)
2278 return mw1->flags - mw2->flags;
2279
2280 if (mw1->start_regno != mw2->start_regno)
2281 return mw1->start_regno - mw2->start_regno;
2282
2283 if (mw1->end_regno != mw2->end_regno)
2284 return mw1->end_regno - mw2->end_regno;
2285
2286 if (mw1->mw_reg != mw2->mw_reg)
2287 return mw1->mw_order - mw2->mw_order;
2288
2289 return 0;
2290 }
2291
2292
2293 /* Sort and compress a set of mws. */
2294
2295 static unsigned int
2296 df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
2297 {
2298 struct df_scan_problem_data *problem_data
2299 = (struct df_scan_problem_data *) df_scan->problem_data;
2300 unsigned int i;
2301 unsigned int dist = 0;
2302 mw_vec[count] = NULL;
2303
2304 if (count < 2)
2305 return count;
2306 else if (count == 2)
2307 {
2308 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
2309 {
2310 struct df_mw_hardreg *tmp = mw_vec[0];
2311 mw_vec[0] = mw_vec[1];
2312 mw_vec[1] = tmp;
2313 }
2314 }
2315 else
2316 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
2317
2318 for (i = 0; i < count - dist; i++)
2319 {
2320 /* Find the next ref that is not equal to the current ref. */
2321 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
2322 {
2323 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
2324 dist++;
2325 }
2326 /* Copy it down to the next position. */
2327 if (dist)
2328 mw_vec[i+1] = mw_vec[i + dist + 1];
2329 }
2330
2331 count -= dist;
2332 mw_vec[count] = NULL;
2333 return count;
2334 }
2335
2336
2337 /* Sort and remove duplicates from the COLLECTION_REC. */
2338
2339 static void
2340 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2341 {
2342 if (collection_rec->def_vec)
2343 collection_rec->next_def
2344 = df_sort_and_compress_refs (collection_rec->def_vec,
2345 collection_rec->next_def);
2346 if (collection_rec->use_vec)
2347 collection_rec->next_use
2348 = df_sort_and_compress_refs (collection_rec->use_vec,
2349 collection_rec->next_use);
2350 if (collection_rec->eq_use_vec)
2351 collection_rec->next_eq_use
2352 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2353 collection_rec->next_eq_use);
2354 if (collection_rec->mw_vec)
2355 collection_rec->next_mw
2356 = df_sort_and_compress_mws (collection_rec->mw_vec,
2357 collection_rec->next_mw);
2358 }
2359
2360
2361 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2362
2363 static void
2364 df_install_ref (struct df_ref *this_ref,
2365 struct df_reg_info *reg_info,
2366 struct df_ref_info *ref_info,
2367 bool add_to_table)
2368 {
2369 unsigned int regno = DF_REF_REGNO (this_ref);
2370 /* Add the ref to the reg_{def,use,eq_use} chain. */
2371 struct df_ref *head = reg_info->reg_chain;
2372
2373 reg_info->reg_chain = this_ref;
2374 reg_info->n_refs++;
2375
2376 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2377 {
2378 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2379 df->hard_regs_live_count[regno]++;
2380 }
2381
2382 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2383 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2384
2385 DF_REF_NEXT_REG (this_ref) = head;
2386
2387 /* THIS_REF is now the head of the chain, so there is no previous element to point back to. */
2388 DF_REF_PREV_REG (this_ref) = NULL;
2389
2390 if (head)
2391 DF_REF_PREV_REG (head) = this_ref;
2392
2393 if (add_to_table)
2394 {
2395 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2396 df_check_and_grow_ref_info (ref_info, 1);
2397 DF_REF_ID (this_ref) = ref_info->table_size;
2398 /* Add the ref to the big array of defs. */
2399 ref_info->refs[ref_info->table_size] = this_ref;
2400 ref_info->table_size++;
2401 }
2402 else
2403 DF_REF_ID (this_ref) = -1;
2404
2405 ref_info->total_size++;
2406 }
2407
2408
2409 /* This function takes one of the groups of refs (defs, uses or
2410 eq_uses) and installs the entire group into the insn. It also adds
2411 each of these refs into the appropriate chains. */
2412
2413 static struct df_ref **
2414 df_install_refs (basic_block bb,
2415 struct df_ref **old_vec, unsigned int count,
2416 struct df_reg_info **reg_info,
2417 struct df_ref_info *ref_info,
2418 bool is_notes)
2419 {
2420 if (count)
2421 {
2422 unsigned int i;
2423 struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
2424 bool add_to_table;
2425
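/* Appending refs to the table invalidates any by-reg or by-insn ordering it had, so downgrade REF_ORDER to the corresponding unordered variant. */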
2426 switch (ref_info->ref_order)
2427 {
2428 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2429 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2430 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2431 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2432 add_to_table = true;
2433 break;
2434 case DF_REF_ORDER_UNORDERED:
2435 case DF_REF_ORDER_BY_REG:
2436 case DF_REF_ORDER_BY_INSN:
2437 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2438 add_to_table = !is_notes;
2439 break;
2440 default:
2441 add_to_table = false;
2442 break;
2443 }
2444
2445 /* Do not add if ref is not in the right blocks. */
2446 if (add_to_table && df->analyze_subset)
2447 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2448
2449 for (i = 0; i < count; i++)
2450 {
2451 struct df_ref *this_ref = old_vec[i];
2452 new_vec[i] = this_ref;
2453 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2454 ref_info, add_to_table);
2455 }
2456
2457 new_vec[count] = NULL;
2458 return new_vec;
2459 }
2460 else
2461 return df_null_ref_rec;
2462 }
2463
2464
2465 /* This function takes the mws and installs the entire group into
2466 the insn. */
2467
2468 static struct df_mw_hardreg **
2469 df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2470 {
2471 if (count)
2472 {
2473 struct df_mw_hardreg **new_vec
2474 = XNEWVEC (struct df_mw_hardreg*, count + 1);
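/* Copy the COUNT entries plus the NULL terminator. */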
2475 memcpy (new_vec, old_vec,
2476 sizeof (struct df_mw_hardreg*) * (count + 1));
2477 return new_vec;
2478 }
2479 else
2480 return df_null_mw_rec;
2481 }
2482
2483
2484 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2485 chains and update other necessary information. */
2486
2487 static void
2488 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2489 basic_block bb, rtx insn)
2490 {
2491 if (insn)
2492 {
2493 struct df_insn_info *insn_rec = DF_INSN_GET (insn);
2494 /* If there is a vector in the collection rec, add it to the
2495 insn. A null rec is a signal that the caller will handle the
2496 chain specially. */
2497 if (collection_rec->def_vec)
2498 {
2499 if (insn_rec->defs && *insn_rec->defs)
2500 free (insn_rec->defs);
2501 insn_rec->defs
2502 = df_install_refs (bb, collection_rec->def_vec,
2503 collection_rec->next_def,
2504 df->def_regs,
2505 &df->def_info, false);
2506 }
2507 if (collection_rec->use_vec)
2508 {
2509 if (insn_rec->uses && *insn_rec->uses)
2510 free (insn_rec->uses);
2511 insn_rec->uses
2512 = df_install_refs (bb, collection_rec->use_vec,
2513 collection_rec->next_use,
2514 df->use_regs,
2515 &df->use_info, false);
2516 }
2517 if (collection_rec->eq_use_vec)
2518 {
2519 if (insn_rec->eq_uses && *insn_rec->eq_uses)
2520 free (insn_rec->eq_uses);
2521 insn_rec->eq_uses
2522 = df_install_refs (bb, collection_rec->eq_use_vec,
2523 collection_rec->next_eq_use,
2524 df->eq_use_regs,
2525 &df->use_info, true);
2526 }
2527 if (collection_rec->mw_vec)
2528 {
2529 if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
2530 free (insn_rec->mw_hardregs);
2531 insn_rec->mw_hardregs
2532 = df_install_mws (collection_rec->mw_vec,
2533 collection_rec->next_mw);
2534 }
2535 }
2536 else
2537 {
2538 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2539
2540 if (bb_info->artificial_defs && *bb_info->artificial_defs)
2541 free (bb_info->artificial_defs);
2542 bb_info->artificial_defs
2543 = df_install_refs (bb, collection_rec->def_vec,
2544 collection_rec->next_def,
2545 df->def_regs,
2546 &df->def_info, false);
2547 if (bb_info->artificial_uses && *bb_info->artificial_uses)
2548 free (bb_info->artificial_uses);
2549 bb_info->artificial_uses
2550 = df_install_refs (bb, collection_rec->use_vec,
2551 collection_rec->next_use,
2552 df->use_regs,
2553 &df->use_info, false);
2554 }
2555 }
2556
2557
2558 /* Allocate a ref and initialize its fields. */
2559
2560 static struct df_ref *
2561 df_ref_create_structure (struct df_collection_rec *collection_rec,
2562 rtx reg, rtx *loc,
2563 basic_block bb, rtx insn,
2564 enum df_ref_type ref_type,
2565 enum df_ref_flags ref_flags)
2566 {
2567 struct df_ref *this_ref;
2568 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2569 struct df_scan_problem_data *problem_data
2570 = (struct df_scan_problem_data *) df_scan->problem_data;
2571
2572 this_ref = pool_alloc (problem_data->ref_pool);
2573 DF_REF_ID (this_ref) = -1;
2574 DF_REF_REG (this_ref) = reg;
2575 DF_REF_REGNO (this_ref) = regno;
2576 DF_REF_LOC (this_ref) = loc;
2577 DF_REF_INSN (this_ref) = insn;
2578 DF_REF_CHAIN (this_ref) = NULL;
2579 DF_REF_TYPE (this_ref) = ref_type;
2580 DF_REF_FLAGS (this_ref) = ref_flags;
2581 DF_REF_BB (this_ref) = bb;
2582 DF_REF_NEXT_REG (this_ref) = NULL;
2583 DF_REF_PREV_REG (this_ref) = NULL;
2584 DF_REF_ORDER (this_ref) = df->ref_order++;
2585
2586 /* We need to clear this bit because fwprop, and in the future
2587 possibly other optimizations, sometimes create new refs using old
2588 refs as the model. */
2589 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2590
2591 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2592 if ((regno < FIRST_PSEUDO_REGISTER)
2593 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2594 {
2595 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2596 {
2597 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2598 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2599 }
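/* A use of an eliminable frame or arg pointer does not make the hard register live. */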
2600 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2601 && (regno == FRAME_POINTER_REGNUM
2602 || regno == ARG_POINTER_REGNUM)))
2603 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2604 }
2605
2606 if (collection_rec)
2607 {
2608 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2609 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2610 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2611 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2612 else
2613 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
2614 }
2615
2616 return this_ref;
2617 }
2618
2619
2620 /* Create new references of type DF_REF_TYPE for each part of register REG
2621 at address LOC within INSN of BB. */
2622
2623 static void
2624 df_ref_record (struct df_collection_rec *collection_rec,
2625 rtx reg, rtx *loc,
2626 basic_block bb, rtx insn,
2627 enum df_ref_type ref_type,
2628 enum df_ref_flags ref_flags)
2629 {
2630 rtx oldreg = reg;
2631 unsigned int regno;
2632
2633 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2634
2635 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2636 if (regno < FIRST_PSEUDO_REGISTER)
2637 {
2638 struct df_mw_hardreg *hardreg = NULL;
2639 struct df_scan_problem_data *problem_data
2640 = (struct df_scan_problem_data *) df_scan->problem_data;
2641 unsigned int i;
2642 unsigned int endregno;
2643 struct df_ref *ref;
2644
2645 if (GET_CODE (reg) == SUBREG)
2646 {
2647 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2648 SUBREG_BYTE (reg), GET_MODE (reg));
2649 endregno = regno + subreg_nregs (reg);
2650 }
2651 else
2652 endregno = END_HARD_REGNO (reg);
2653
2654 /* If this is a multiword hardreg, we create some extra
2655 datastructures that will enable us to easily build REG_DEAD
2656 and REG_UNUSED notes. */
2657 if ((endregno != regno + 1) && insn)
2658 {
2659 /* Sets to a subreg of a multiword register are partial.
2660 Sets to a non-subreg of a multiword register are not. */
2661 if (GET_CODE (oldreg) == SUBREG)
2662 ref_flags |= DF_REF_PARTIAL;
2663 ref_flags |= DF_REF_MW_HARDREG;
2664
2665 hardreg = pool_alloc (problem_data->mw_reg_pool);
2666 hardreg->type = ref_type;
2667 hardreg->flags = ref_flags;
2668 hardreg->mw_reg = reg;
2669 hardreg->loc = loc;
2670 hardreg->start_regno = regno;
2671 hardreg->end_regno = endregno - 1;
2672 hardreg->mw_order = df->ref_order++;
2673 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
2674 }
2675
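/* Create one ref for each hard register covered by REG. */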
2676 for (i = regno; i < endregno; i++)
2677 {
2678 ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
2679 bb, insn, ref_type, ref_flags);
2680
2681 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2682 }
2683 }
2684 else
2685 {
2686 struct df_ref *ref;
2687 ref = df_ref_create_structure (collection_rec, reg, loc, bb, insn,
2688 ref_type, ref_flags);
2689 }
2690 }
2691
2692
2693 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2694 covered by the outer mode is smaller than that covered by the inner mode,
2695 is a read-modify-write operation.
2696 This function returns true iff the SUBREG X is such a SUBREG. */
2697
2698 bool
2699 df_read_modify_subreg_p (rtx x)
2700 {
2701 unsigned int isize, osize;
2702 if (GET_CODE (x) != SUBREG)
2703 return false;
2704 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2705 osize = GET_MODE_SIZE (GET_MODE (x));
2706 return isize > osize
2707 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2708 }
2709
2710
2711 /* Process all the registers defined in the rtx, X.
2712 Autoincrement/decrement definitions will be picked up by
2713 df_uses_record. */
2714
2715 static void
2716 df_def_record_1 (struct df_collection_rec *collection_rec,
2717 rtx x, basic_block bb, rtx insn,
2718 enum df_ref_flags flags)
2719 {
2720 rtx *loc;
2721 rtx dst;
2722
2723 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
2724 construct. */
2725 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2726 loc = &XEXP (x, 0);
2727 else
2728 loc = &SET_DEST (x);
2729 dst = *loc;
2730
2731 /* It is legal to have a set destination be a parallel. */
2732 if (GET_CODE (dst) == PARALLEL)
2733 {
2734 int i;
2735
2736 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2737 {
2738 rtx temp = XVECEXP (dst, 0, i);
2739 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2740 || GET_CODE (temp) == SET)
2741 df_def_record_1 (collection_rec,
2742 temp, bb, insn,
2743 GET_CODE (temp) == CLOBBER
2744 ? flags | DF_REF_MUST_CLOBBER : flags);
2745 }
2746 return;
2747 }
2748
2749 /* Maybe we should flag the use of STRICT_LOW_PART somehow. It might
2750 be handy for the reg allocator. */
2751 while (GET_CODE (dst) == STRICT_LOW_PART
2752 || GET_CODE (dst) == ZERO_EXTRACT)
2753 {
2754 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2755 loc = &XEXP (dst, 0);
2756 dst = *loc;
2757 }
2758
2759 if (df_read_modify_subreg_p (dst))
2760 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2761
2762 if (REG_P (dst)
2763 || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
2764 df_ref_record (collection_rec,
2765 dst, loc, bb, insn, DF_REF_REG_DEF, flags);
2766
2767 /* We want to keep sp alive everywhere - by making all
2768 writes to sp also use of sp. */
2769 if (REG_P (dst) && REGNO (dst) == STACK_POINTER_REGNUM)
2770 df_ref_record (collection_rec,
2771 dst, NULL, bb, insn, DF_REF_REG_USE, flags);
2772 }
2773
2774
2775 /* Process all the registers defined in the pattern rtx, X. */
2776
2777 static void
2778 df_defs_record (struct df_collection_rec *collection_rec,
2779 rtx x, basic_block bb, rtx insn, enum df_ref_flags flags)
2780 {
2781 RTX_CODE code = GET_CODE (x);
2782
2783 if (code == SET || code == CLOBBER)
2784 {
2785 /* Mark the single def within the pattern. */
2786 enum df_ref_flags clobber_flags = flags;
2787 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2788 df_def_record_1 (collection_rec, x, bb, insn, clobber_flags);
2789 }
2790 else if (code == COND_EXEC)
2791 {
2792 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2793 bb, insn, DF_REF_CONDITIONAL);
2794 }
2795 else if (code == PARALLEL)
2796 {
2797 int i;
2798
2799 /* Mark the multiple defs within the pattern. */
2800 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2801 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn, flags);
2802 }
2803 }
2804
2805
2806 /* Process all the registers used in the rtx at address LOC. */
2807
2808 static void
2809 df_uses_record (struct df_collection_rec *collection_rec,
2810 rtx *loc, enum df_ref_type ref_type,
2811 basic_block bb, rtx insn, enum df_ref_flags flags)
2812 {
2813 RTX_CODE code;
2814 rtx x;
2815
2816 retry:
2817 x = *loc;
2818 if (!x)
2819 return;
2820 code = GET_CODE (x);
2821 switch (code)
2822 {
2823 case LABEL_REF:
2824 case SYMBOL_REF:
2825 case CONST_INT:
2826 case CONST:
2827 case CONST_DOUBLE:
2828 case CONST_VECTOR:
2829 case PC:
2830 case CC0:
2831 case ADDR_VEC:
2832 case ADDR_DIFF_VEC:
2833 return;
2834
2835 case CLOBBER:
2836 /* If we are clobbering a MEM, mark any registers inside the address
2837 as being used. */
2838 if (MEM_P (XEXP (x, 0)))
2839 df_uses_record (collection_rec,
2840 &XEXP (XEXP (x, 0), 0),
2841 DF_REF_REG_MEM_STORE, bb, insn, flags);
2842
2843 /* If we're clobbering a REG then we have a def so ignore. */
2844 return;
2845
2846 case MEM:
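/* Scan the address; of the incoming flags, only DF_REF_IN_NOTE is propagated to the refs inside it. */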
2847 df_uses_record (collection_rec,
2848 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2849 bb, insn, flags & DF_REF_IN_NOTE);
2850 return;
2851
2852 case SUBREG:
2853 /* A SUBREG touches only part of the underlying register. */
2854 flags |= DF_REF_PARTIAL;
2855 /* If the SUBREG is not of a REG, scan inside it rather than falling through to the REG case. */
2856 if (!REG_P (SUBREG_REG (x)))
2857 {
2858 loc = &SUBREG_REG (x);
2859 df_uses_record (collection_rec, loc, ref_type, bb, insn, flags);
2860 return;
2861 }
2862 /* ... Fall through ... */
2863
2864 case REG:
2865 df_ref_record (collection_rec,
2866 x, loc, bb, insn, ref_type, flags);
2867 return;
2868
2869 case SET:
2870 {
2871 rtx dst = SET_DEST (x);
2872 gcc_assert (!(flags & DF_REF_IN_NOTE));
2873 df_uses_record (collection_rec,
2874 &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);
2875
2876 switch (GET_CODE (dst))
2877 {
2878 case SUBREG:
2879 if (df_read_modify_subreg_p (dst))
2880 {
2881 df_uses_record (collection_rec, &SUBREG_REG (dst),
2882 DF_REF_REG_USE, bb, insn, flags | DF_REF_READ_WRITE);
2883 break;
2884 }
2885 /* Fall through. */
2886 case REG:
2887 case PARALLEL:
2888 case SCRATCH:
2889 case PC:
2890 case CC0:
2891 break;
2892 case MEM:
2893 df_uses_record (collection_rec, &XEXP (dst, 0),
2894 DF_REF_REG_MEM_STORE, bb, insn, flags);
2895 break;
2896 case STRICT_LOW_PART:
2897 {
2898 rtx *temp = &XEXP (dst, 0);
2899 /* A strict_low_part uses the whole REG and not just the
2900 SUBREG. */
2901 dst = XEXP (dst, 0);
2902 df_uses_record (collection_rec,
2903 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
2904 DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
2905 }
2906 break;
2907 case ZERO_EXTRACT:
2908 case SIGN_EXTRACT:
2909 df_uses_record (collection_rec, &XEXP (dst, 0),
2910 DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
2911 df_uses_record (collection_rec, &XEXP (dst, 1),
2912 DF_REF_REG_USE, bb, insn, flags);
2913 df_uses_record (collection_rec, &XEXP (dst, 2),
2914 DF_REF_REG_USE, bb, insn, flags);
2915 dst = XEXP (dst, 0);
2916 break;
2917 default:
2918 gcc_unreachable ();
2919 }
2920 return;
2921 }
2922
2923 case RETURN:
2924 break;
2925
2926 case ASM_OPERANDS:
2927 case UNSPEC_VOLATILE:
2928 case TRAP_IF:
2929 case ASM_INPUT:
2930 {
2931 /* Traditional and volatile asm instructions must be
2932 considered to use and clobber all hard registers, all
2933 pseudo-registers and all of memory. So must TRAP_IF and
2934 UNSPEC_VOLATILE operations.
2935
2936 Consider for instance a volatile asm that changes the fpu
2937 rounding mode. An insn should not be moved across this
2938 even if it only uses pseudo-regs because it might give an
2939 incorrectly rounded result.
2940
2941 However, flow.c's liveness computation did *not* do this,
2942 giving the reasoning as " ?!? Unfortunately, marking all
2943 hard registers as live causes massive problems for the
2944 register allocator and marking all pseudos as live creates
2945 mountains of uninitialized variable warnings."
2946
2947 In order to maintain the status quo with regard to liveness
2948 and uses, we do what flow.c did and just mark any regs we
2949 can find in ASM_OPERANDS as used. In global.c, asm insns are
2950 scanned and regs_asm_clobbered is filled out.
2951 
2952 For all ASM_OPERANDS, we must traverse the vector of input
2953 operands. We cannot just fall through here since then we
2954 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
2955 which do not indicate traditional asms unlike their normal
2956 usage. */
2957 if (code == ASM_OPERANDS)
2958 {
2959 int j;
2960
2961 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
2962 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
2963 DF_REF_REG_USE, bb, insn, flags);
2964 return;
2965 }
2966 break;
2967 }
2968
2969 case PRE_DEC:
2970 case POST_DEC:
2971 case PRE_INC:
2972 case POST_INC:
2973 case PRE_MODIFY:
2974 case POST_MODIFY:
2975 /* Catch the def of the register being modified. */
2976 df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0), bb, insn,
2977 DF_REF_REG_DEF,
2978 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
2979
2980 /* ... Fall through to handle uses ... */
2981
2982 default:
2983 break;
2984 }
2985
2986 /* Recursively scan the operands of this expression. */
2987 {
2988 const char *fmt = GET_RTX_FORMAT (code);
2989 int i;
2990
2991 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2992 {
2993 if (fmt[i] == 'e')
2994 {
2995 /* Tail recursive case: save a function call level. */
2996 if (i == 0)
2997 {
2998 loc = &XEXP (x, 0);
2999 goto retry;
3000 }
3001 df_uses_record (collection_rec, &XEXP (x, i), ref_type, bb, insn, flags);
3002 }
3003 else if (fmt[i] == 'E')
3004 {
3005 int j;
3006 for (j = 0; j < XVECLEN (x, i); j++)
3007 df_uses_record (collection_rec,
3008 &XVECEXP (x, i, j), ref_type, bb, insn, flags);
3009 }
3010 }
3011 }
3012
3013 return;
3014 }
3015
3016
3017 /* For each DF_REF_CONDITIONAL def, add a corresponding use: a conditional def may not actually overwrite the register, so its old value must be kept live. */
3018
3019 static void
3020 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3021 {
3022 unsigned int i;
3023 for (i = 0; i < collection_rec->next_def; i++)
3024 {
3025 struct df_ref *ref = collection_rec->def_vec[i];
3026 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3027 {
3028 struct df_ref *use
3029 = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
3030 DF_REF_LOC (ref), DF_REF_BB (ref),
3031 DF_REF_INSN (ref), DF_REF_REG_USE,
3032 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3033 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3034 }
3035 }
3036 }
3037
3038
3039 /* Get call's extra defs and uses. */
3040
3041 static void
3042 df_get_call_refs (struct df_collection_rec * collection_rec,
3043 basic_block bb,
3044 rtx insn,
3045 enum df_ref_flags flags)
3046 {
3047 rtx note;
3048 bitmap_iterator bi;
3049 unsigned int ui;
3050 bool is_sibling_call;
3051 unsigned int i;
3052 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3053
3054 /* Do not generate clobbers for registers that are the result of the
3055 call. This causes ordering problems in the chain building code
3056 depending on which def is seen first. */
3057 for (i = 0; i < collection_rec->next_def; i++)
3058 {
3059 struct df_ref *def = collection_rec->def_vec[i];
3060 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3061 }
3062
3063 /* Record the registers used to pass arguments, and explicitly
3064 noted as clobbered. */
3065 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
3066 note = XEXP (note, 1))
3067 {
3068 if (GET_CODE (XEXP (note, 0)) == USE)
3069 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3070 DF_REF_REG_USE, bb, insn, flags);
3071 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3072 {
3073 if (REG_P (XEXP (XEXP (note, 0), 0)))
3074 {
3075 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3076 if (!bitmap_bit_p (defs_generated, regno))
3077 df_defs_record (collection_rec, XEXP (note, 0), bb,
3078 insn, flags);
3079 }
3080 else
3081 df_uses_record (collection_rec, &XEXP (note, 0),
3082 DF_REF_REG_USE, bb, insn, flags);
3083 }
3084 }
3085
3086 /* The stack ptr is used (honorarily) by a CALL insn. */
3087 df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3088 NULL, bb, insn, DF_REF_REG_USE, DF_REF_CALL_STACK_USAGE | flags);
3089
3090 /* Calls may also reference any of the global registers,
3091 so they are recorded as used. */
3092 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3093 if (global_regs[i])
3094 df_ref_record (collection_rec, regno_reg_rtx[i],
3095 NULL, bb, insn, DF_REF_REG_USE, flags);
3096
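/* Add maybe-clobbered defs for the registers invalidated by the call. For a sibling call, registers that are live in the exit block are not clobbered here unless they carry the return value. */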
3097 is_sibling_call = SIBLING_CALL_P (insn);
3098 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
3099 {
3100 if ((!bitmap_bit_p (defs_generated, ui))
3101 && (!is_sibling_call
3102 || !bitmap_bit_p (df->exit_block_uses, ui)
3103 || refers_to_regno_p (ui, ui+1,
3104 current_function_return_rtx, NULL)))
3105
3106 df_ref_record (collection_rec, regno_reg_rtx[ui],
3107 NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags);
3108 }
3109
3110 BITMAP_FREE (defs_generated);
3111 return;
3112 }
3113
3114 /* Collect all refs in the INSN. This function is free of any
3115 side effects: it creates and returns lists of df_ref's in the
3116 COLLECTION_REC without putting those refs into the existing ref
3117 chains and reg chains. */
3118
3119 static void
3120 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3121 basic_block bb, rtx insn)
3122 {
3123 rtx note;
3124 bool is_cond_exec = (GET_CODE (PATTERN (insn)) == COND_EXEC);
3125
3126 /* Clear out the collection record. */
3127 collection_rec->next_def = 0;
3128 collection_rec->next_use = 0;
3129 collection_rec->next_eq_use = 0;
3130 collection_rec->next_mw = 0;
3131
3132 /* Record register defs. */
3133 df_defs_record (collection_rec, PATTERN (insn), bb, insn, 0);
3134
3135 /* Process REG_EQUIV/REG_EQUAL notes */
3136 for (note = REG_NOTES (insn); note;
3137 note = XEXP (note, 1))
3138 {
3139 switch (REG_NOTE_KIND (note))
3140 {
3141 case REG_EQUIV:
3142 case REG_EQUAL:
3143 df_uses_record (collection_rec,
3144 &XEXP (note, 0), DF_REF_REG_USE,
3145 bb, insn, DF_REF_IN_NOTE);
3146 break;
3147 case REG_NON_LOCAL_GOTO:
3148 /* The frame ptr is used by a non-local goto. */
3149 df_ref_record (collection_rec,
3150 regno_reg_rtx[FRAME_POINTER_REGNUM],
3151 NULL,
3152 bb, insn,
3153 DF_REF_REG_USE, 0);
3154 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3155 df_ref_record (collection_rec,
3156 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3157 NULL,
3158 bb, insn,
3159 DF_REF_REG_USE, 0);
3160 #endif
3161 break;
3162 default:
3163 break;
3164 }
3165 }
3166
3167 if (CALL_P (insn))
3168 df_get_call_refs (collection_rec, bb, insn,
3169 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3170
3171 /* Record the register uses. */
3172 df_uses_record (collection_rec,
3173 &PATTERN (insn), DF_REF_REG_USE, bb, insn, 0);
3174
3175 /* DF_REF_CONDITIONAL needs corresponding USES. */
3176 if (is_cond_exec)
3177 df_get_conditional_uses (collection_rec);
3178
3179 df_canonize_collection_rec (collection_rec);
3180 }
3181
3182 /* Return true if any incoming edge of BB is an EH edge. */
3183
3184 bool
3185 df_has_eh_preds (basic_block bb)
3186 {
3187 edge e;
3188 edge_iterator ei;
3189
3190 FOR_EACH_EDGE (e, ei, bb->preds)
3191 {
3192 if (e->flags & EDGE_EH)
3193 return true;
3194 }
3195 return false;
3196 }
3197
3198
3199 /* Recompute the luids for the insns in BB. */
3200
3201 void
3202 df_recompute_luids (basic_block bb)
3203 {
3204 rtx insn;
3205 int luid = 0;
3206
3207 df_grow_insn_info ();
3208
3209 /* Scan the block an insn at a time from beginning to end. */
3210 FOR_BB_INSNS (bb, insn)
3211 {
3212 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3213 /* Inserting labels does not always trigger the incremental
3214 rescanning. */
3215 if (!insn_info)
3216 {
3217 gcc_assert (!INSN_P (insn));
3218 df_insn_create_insn_record (insn);
3219 }
3220
3221 DF_INSN_LUID (insn) = luid;
3222 if (INSN_P (insn))
3223 luid++;
3224 }
3225 }
3226
3227
3228 /* Returns true if the function entry needs to
3229 define the static chain register. */
3230
3231 static bool
3232 df_need_static_chain_reg (struct function *fun)
3233 {
3234 tree fun_context = decl_function_context (fun->decl);
3235 return fun_context
3236 && !DECL_NO_STATIC_CHAIN (fun_context);
3237 }
3238
3239
3240 /* Collect all artificial refs at the block level for BB and add them
3241 to COLLECTION_REC. */
3242
3243 static void
3244 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3245 {
3246 collection_rec->next_def = 0;
3247 collection_rec->next_use = 0;
3248 collection_rec->next_eq_use = 0;
3249 collection_rec->next_mw = 0;
3250
3251 if (bb->index == ENTRY_BLOCK)
3252 {
3253 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3254 return;
3255 }
3256 else if (bb->index == EXIT_BLOCK)
3257 {
3258 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3259 return;
3260 }
3261
3262 #ifdef EH_RETURN_DATA_REGNO
3263 if (df_has_eh_preds (bb))
3264 {
3265 unsigned int i;
3266 /* Mark the registers that will contain data for the handler. */
3267 for (i = 0; ; ++i)
3268 {
3269 unsigned regno = EH_RETURN_DATA_REGNO (i);
3270 if (regno == INVALID_REGNUM)
3271 break;
3272 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3273 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3274 }
3275 }
3276 #endif
3277
3278
3279 #ifdef EH_USES
3280 if (df_has_eh_preds (bb))
3281 {
3282 unsigned int i;
3283 /* This code is putting in an artificial ref for the use at the
3284 TOP of the block that receives the exception. It is too
3285 cumbersome to actually put the ref on the edge. We could
3286 either model this at the top of the receiver block or the
3287 bottom of the sender block.
3288
3289 The bottom of the sender block is problematic because not all
3290 out-edges of a block are eh-edges. However, it is true
3291 that all edges into a block are either eh-edges or none of
3292 them are eh-edges. Thus, we can model this at the top of the
3293 eh-receiver for all of the edges at once. */
3294 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3295 if (EH_USES (i))
3296 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3297 bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP);
3298 }
3299 #endif
3300
3301 /* Add the hard_frame_pointer if this block is the target of a
3302 non-local goto. */
3303 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3304 df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
3305 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3306
3307 /* Add the artificial uses. */
3308 if (bb->index >= NUM_FIXED_BLOCKS)
3309 {
3310 bitmap_iterator bi;
3311 unsigned int regno;
3312 bitmap au = df_has_eh_preds (bb)
3313 ? df->eh_block_artificial_uses
3314 : df->regular_block_artificial_uses;
3315
3316 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3317 {
3318 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3319 bb, NULL, DF_REF_REG_USE, 0);
3320 }
3321 }
3322
3323 df_canonize_collection_rec (collection_rec);
3324 }
3325
3326
3327 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3328
3329 void
3330 df_bb_refs_record (int bb_index, bool scan_insns)
3331 {
3332 basic_block bb = BASIC_BLOCK (bb_index);
3333 rtx insn;
3334 int luid = 0;
3335 struct df_scan_bb_info *bb_info;
3336 struct df_collection_rec collection_rec;
3337 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
3338 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
3339 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
3340 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
3341
3342 if (!df)
3343 return;
3344
3345 bb_info = df_scan_get_bb_info (bb_index);
3346
3347 /* Need to make sure that there is a record in the basic block info. */
3348 if (!bb_info)
3349 {
3350 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3351 df_scan_set_bb_info (bb_index, bb_info);
3352 bb_info->artificial_defs = NULL;
3353 bb_info->artificial_uses = NULL;
3354 }
3355
3356 if (scan_insns)
3357 /* Scan the block an insn at a time from beginning to end. */
3358 FOR_BB_INSNS (bb, insn)
3359 {
3360 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3361 gcc_assert (!insn_info);
3362
3363 df_insn_create_insn_record (insn);
3364 if (INSN_P (insn))
3365 {
3366 /* Record refs within INSN. */
3367 DF_INSN_LUID (insn) = luid++;
3368 df_insn_refs_collect (&collection_rec, bb, insn);
3369 df_refs_add_to_chains (&collection_rec, bb, insn);
3370 }
3371 DF_INSN_LUID (insn) = luid;
3372 }
3373
3374 /* Other block level artificial refs */
3375 df_bb_refs_collect (&collection_rec, bb);
3376 df_refs_add_to_chains (&collection_rec, bb, NULL);
3377
3378 /* Now that the block has been processed, set the block as dirty so
3379 lr and ur will get it processed. */
3380 df_set_bb_dirty (bb);
3381 }
3382
3383
3384 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3385 block. */
3386
3387 static void
3388 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3389 {
3390 bitmap_clear (regular_block_artificial_uses);
3391
3392 if (reload_completed)
3393 {
3394 if (frame_pointer_needed)
3395 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3396 }
3397 else
3398 /* Before reload, there are a few registers that must be forced
3399 live everywhere -- which might not already be the case for
3400 blocks within infinite loops. */
3401 {
3402 /* Any reference to any pseudo before reload is a potential
3403 reference of the frame pointer. */
3404 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3405
3406 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3407 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3408 #endif
3409
3410 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3411 /* Pseudos with argument area equivalences may require
3412 reloading via the argument pointer. */
3413 if (fixed_regs[ARG_POINTER_REGNUM])
3414 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3415 #endif
3416
3417 /* Any constant, or pseudo with constant equivalences, may
3418 require reloading from memory using the pic register. */
3419 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3420 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3421 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3422 }
3423 /* The all-important stack pointer must always be live. */
3424 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3425 }
3426
3427
3428 /* Get the artificial use set for an eh block. */
3429
3430 static void
3431 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3432 {
3433 bitmap_clear (eh_block_artificial_uses);
3434
3435 /* The following code (down through the arg_pointer setting) APPEARS
3436 to be necessary because there is nothing that actually
3437 describes what the exception handling code may need
3438 to keep alive. */
3439 if (reload_completed)
3440 {
3441 if (frame_pointer_needed)
3442 {
3443 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3444 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3445 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3446 #endif
3447 }
3448 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3449 if (fixed_regs[ARG_POINTER_REGNUM])
3450 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3451 #endif
3452 }
3453 }
3454
3455
3456 \f
3457 /*----------------------------------------------------------------------------
3458 Specialized hard register scanning functions.
3459 ----------------------------------------------------------------------------*/
3460
3461
3462 /* Mark a register in SET. Hard registers in large modes get all
3463 of their component registers set as well. */
3464
3465 static void
3466 df_mark_reg (rtx reg, void *vset)
3467 {
3468 bitmap set = (bitmap) vset;
3469 int regno = REGNO (reg);
3470
3471 gcc_assert (GET_MODE (reg) != BLKmode);
3472
3473 bitmap_set_bit (set, regno);
3474 if (regno < FIRST_PSEUDO_REGISTER)
3475 {
3476 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3477 while (--n > 0)
3478 bitmap_set_bit (set, regno + n);
3479 }
3480 }
3481
3482
3483
3484
3485 /* Set the bit for regs that are considered being defined at the entry. */
3486
3487 static void
3488 df_get_entry_block_def_set (bitmap entry_block_defs)
3489 {
3490 rtx r;
3491 int i;
3492
3493 bitmap_clear (entry_block_defs);
3494
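/* Registers that may carry incoming arguments are defined at entry. INCOMING_REGNO maps each such register to the one in which the argument actually arrives, e.g. across register windows. */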
3495 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3496 {
3497 if (FUNCTION_ARG_REGNO_P (i))
3498 #ifdef INCOMING_REGNO
3499 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3500 #else
3501 bitmap_set_bit (entry_block_defs, i);
3502 #endif
3503 }
3504
3505 /* Once the prologue has been generated, all of these registers
3506 should just show up in the first regular block. */
3507 if (HAVE_prologue && epilogue_completed)
3508 {
3509 /* Defs for the callee saved registers are inserted so that the
3510 pushes have some defining location. */
3511 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3512 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3513 bitmap_set_bit (entry_block_defs, i);
3514 }
3515 else
3516 {
3517 /* The always important stack pointer. */
3518 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3519
3520 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3521 only STATIC_CHAIN_REGNUM is defined. If they are different,
3522 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3523 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3524 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3525 #else
3526 #ifdef STATIC_CHAIN_REGNUM
3527 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3528 #endif
3529 #endif
3530
3531 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3532 if (r && REG_P (r))
3533 bitmap_set_bit (entry_block_defs, REGNO (r));
3534 }
3535
3536 if ((!reload_completed) || frame_pointer_needed)
3537 {
3538 /* Any reference to any pseudo before reload is a potential
3539 reference of the frame pointer. */
3540 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3541 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3542 /* If they are different, also mark the hard frame pointer as live. */
3543 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3544 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3545 #endif
3546 }
3547
3548 /* These registers are live everywhere. */
3549 if (!reload_completed)
3550 {
3551 #ifdef EH_USES
3552 /* The ia-64, the only machine that uses this, does not define these
3553 until after reload. */
3554 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3555 if (EH_USES (i))
3556 {
3557 bitmap_set_bit (entry_block_defs, i);
3558 }
3559 #endif
3560
3561 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3562 /* Pseudos with argument area equivalences may require
3563 reloading via the argument pointer. */
3564 if (fixed_regs[ARG_POINTER_REGNUM])
3565 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3566 #endif
3567
3568 #ifdef PIC_OFFSET_TABLE_REGNUM
3569 /* Any constant, or pseudo with constant equivalences, may
3570 require reloading from memory using the pic register. */
3571 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3572 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3573 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3574 #endif
3575 }
3576
3577 #ifdef INCOMING_RETURN_ADDR_RTX
3578 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3579 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3580 #endif
3581
3582 targetm.live_on_entry (entry_block_defs);
3583
3584 /* If the function has an incoming STATIC_CHAIN,
3585 it has to show up in the entry def set. */
3586 if (df_need_static_chain_reg (cfun))
3587 {
3588 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3589 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3590 #else
3591 #ifdef STATIC_CHAIN_REGNUM
3592 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3593 #endif
3594 #endif
3595 }
3596 }
3597
3598
3599 /* Collect refs for the (conservative) set of hard registers that
3600 are defined on entry to the function.
3601 It uses ENTRY_BLOCK_DEFS to determine which register
3602 references to include. */
3603
3604 static void
3605 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3606 bitmap entry_block_defs)
3607 {
3608 unsigned int i;
3609 bitmap_iterator bi;
3610
3611 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3612 {
3613 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3614 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3615 }
3616
3617 df_canonize_collection_rec (collection_rec);
3618 }
3619
3620
3621 /* Record the (conservative) set of hard registers that are defined on
3622 entry to the function. */
3623
3624 static void
3625 df_record_entry_block_defs (bitmap entry_block_defs)
3626 {
3627 struct df_collection_rec collection_rec;
3628 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3629 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3630
3631 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3632
3633 /* Process bb_refs chain */
3634 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3635 }
3636
3637
3638 /* Update the defs in the entry block. */
3639
3640 void
3641 df_update_entry_block_defs (void)
3642 {
3643 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3644 bool changed = false;
3645
3646 df_get_entry_block_def_set (refs);
3647 if (df->entry_block_defs)
3648 {
3649 if (!bitmap_equal_p (df->entry_block_defs, refs))
3650 {
3651 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3652 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3653 df_ref_chain_delete (bb_info->artificial_defs);
3654 bb_info->artificial_defs = NULL;
3655 changed = true;
3656 }
3657 }
3658 else
3659 {
3660 struct df_scan_problem_data *problem_data
3661 = (struct df_scan_problem_data *) df_scan->problem_data;
3662 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3663 changed = true;
3664 }
3665
3666 if (changed)
3667 {
3668 df_record_entry_block_defs (refs);
3669 bitmap_copy (df->entry_block_defs, refs);
3670 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3671 }
3672 BITMAP_FREE (refs);
3673 }
3674
3675
3676 /* Set the bit for regs that are considered being used at the exit. */
3677
3678 static void
3679 df_get_exit_block_use_set (bitmap exit_block_uses)
3680 {
3681 unsigned int i;
3682
3683 bitmap_clear (exit_block_uses);
3684
3685 /* Stack pointer is always live at the exit. */
3686 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3687
3688 /* Mark the frame pointer if needed at the end of the function.
3689 If we end up eliminating it, it will be removed from the live
3690 list of each basic block by reload. */
3691
3692 if ((!reload_completed) || frame_pointer_needed)
3693 {
3694 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3695 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3696 /* If they are different, also mark the hard frame pointer as live. */
3697 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3698 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3699 #endif
3700 }
3701
3702 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3703 /* Many architectures have a GP register even without flag_pic.
3704 Assume the pic register is not in use, or will be handled by
3705 other means, if it is not fixed. */
3706 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3707 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3708 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3709 #endif
3710
3711 /* Mark all global registers, and all registers used by the
3712 epilogue as being live at the end of the function since they
3713 may be referenced by our caller. */
3714 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3715 if (global_regs[i] || EPILOGUE_USES (i))
3716 bitmap_set_bit (exit_block_uses, i);
3717
3718 if (HAVE_epilogue && epilogue_completed)
3719 {
3720 /* Mark all call-saved registers that we actually used. */
3721 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3722 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3723 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3724 bitmap_set_bit (exit_block_uses, i);
3725 }
3726
3727 #ifdef EH_RETURN_DATA_REGNO
3728 /* Mark the registers that will contain data for the handler. */
3729 if (reload_completed && current_function_calls_eh_return)
3730 for (i = 0; ; ++i)
3731 {
3732 unsigned regno = EH_RETURN_DATA_REGNO (i);
3733 if (regno == INVALID_REGNUM)
3734 break;
3735 bitmap_set_bit (exit_block_uses, regno);
3736 }
3737 #endif
3738
3739 #ifdef EH_RETURN_STACKADJ_RTX
3740 if ((!HAVE_epilogue || ! epilogue_completed)
3741 && current_function_calls_eh_return)
3742 {
3743 rtx tmp = EH_RETURN_STACKADJ_RTX;
3744 if (tmp && REG_P (tmp))
3745 df_mark_reg (tmp, exit_block_uses);
3746 }
3747 #endif
3748
3749 #ifdef EH_RETURN_HANDLER_RTX
3750 if ((!HAVE_epilogue || ! epilogue_completed)
3751 && current_function_calls_eh_return)
3752 {
3753 rtx tmp = EH_RETURN_HANDLER_RTX;
3754 if (tmp && REG_P (tmp))
3755 df_mark_reg (tmp, exit_block_uses);
3756 }
3757 #endif
3758
3759 /* Mark function return value. */
3760 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3761 }
3762
3763
3764 /* Collect the refs of hard registers that are used in the exit block.
3765 It uses EXIT_BLOCK_USES to determine which registers to include. */
3766
3767 static void
3768 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3769 {
3770 unsigned int i;
3771 bitmap_iterator bi;
3772
3773 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3774 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3775 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3776
3777 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3778 /* It is deliberate that this is not put in the exit block uses but
3779 I do not know why. */
3780 if (reload_completed
3781 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3782 && df_has_eh_preds (EXIT_BLOCK_PTR)
3783 && fixed_regs[ARG_POINTER_REGNUM])
3784 df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3785 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3786 #endif
3787
3788 df_canonize_collection_rec (collection_rec);
3789 }
3790
3791
3792 /* Record the set of hard registers that are used in the exit block.
3793 It uses df->exit_block_uses to determine which bit to include. */
3794
3795 static void
3796 df_record_exit_block_uses (bitmap exit_block_uses)
3797 {
3798 struct df_collection_rec collection_rec;
3799 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3800 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3801
3802 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3803
3804 /* Process bb_refs chain */
3805 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3806 }
3807
3808
3809 /* Update the uses in the exit block. */
3810
3811 void
3812 df_update_exit_block_uses (void)
3813 {
3814 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3815 bool changed = false;
3816
3817 df_get_exit_block_use_set (refs);
3818 if (df->exit_block_uses)
3819 {
3820 if (!bitmap_equal_p (df->exit_block_uses, refs))
3821 {
3822 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3823 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3824 df_ref_chain_delete (bb_info->artificial_uses);
3825 bb_info->artificial_uses = NULL;
3826 changed = true;
3827 }
3828 }
3829 else
3830 {
3831 struct df_scan_problem_data *problem_data
3832 = (struct df_scan_problem_data *) df_scan->problem_data;
3833 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3834 changed = true;
3835 }
3836
3837 if (changed)
3838 {
3839 df_record_exit_block_uses (refs);
3840 bitmap_copy (df->exit_block_uses, refs);
3841 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
3842 }
3843 BITMAP_FREE (refs);
3844 }
3845
3846 static bool initialized = false;
3847
3848
3849 /* Initialize some platform specific structures. */
3850
3851 void
3852 df_hard_reg_init (void)
3853 {
3854 int i;
3855 #ifdef ELIMINABLE_REGS
3856 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3857 #endif
3858 if (initialized)
3859 return;
3860
3861 bitmap_obstack_initialize (&persistent_obstack);
3862
3863 /* Record which registers will be eliminated. We use this in
3864 mark_used_regs. */
3865 CLEAR_HARD_REG_SET (elim_reg_set);
3866
3867 #ifdef ELIMINABLE_REGS
3868 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3869 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3870 #else
3871 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3872 #endif
3873
3874 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
3875
3876 /* Inconveniently, this is only readily available in hard reg set
3877 form. */
3878 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
3879 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3880 bitmap_set_bit (df_invalidated_by_call, i);
3881
3882 initialized = true;
3883 }
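
/* Illustrative sketch, not part of the original file: once
   df_hard_reg_init has run, df_invalidated_by_call can be combined
   with ordinary bitmaps directly, which is the point of keeping the
   bitmap copy.  The helper name and its use case are hypothetical.  */

static void
example_kill_call_clobbered (bitmap live)
{
  /* Remove every register clobbered by a call from LIVE.  */
  bitmap_and_compl_into (live, df_invalidated_by_call);
}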
3884
3885
3886 /* Recompute the parts of scanning that are based on regs_ever_live
3887 because something changed in that array. */
3888
3889 void
3890 df_update_entry_exit_and_calls (void)
3891 {
3892 basic_block bb;
3893
3894 df_update_entry_block_defs ();
3895 df_update_exit_block_uses ();
3896
3897 /* The call insns need to be rescanned because there may be changes
3898 in the set of registers clobbered across the call. */
3899 FOR_EACH_BB (bb)
3900 {
3901 rtx insn;
3902 FOR_BB_INSNS (bb, insn)
3903 {
3904 if (INSN_P (insn) && CALL_P (insn))
3905 df_insn_rescan (insn);
3906 }
3907 }
3908 }
3909
3910
3911 /* Return true if hard REG is actually used in some instruction.
3912    There are a fair number of conditions that affect the setting of
3913    this array.  See the comment in df.h for df->hard_regs_live_count
3914    for the conditions under which this array is set. */
3915
3916 bool
3917 df_hard_reg_used_p (unsigned int reg)
3918 {
3919 gcc_assert (df);
3920 return df->hard_regs_live_count[reg] != 0;
3921 }
3922
3923
3924 /* Return a count of the number of times REG is actually used in some
3925    instruction.  There are a fair number of conditions that affect the
3926    setting of this array.  See the comment in df.h for
3927    df->hard_regs_live_count for the conditions under which this array
3928    is set. */
3929
3930
3931 unsigned int
3932 df_hard_reg_used_count (unsigned int reg)
3933 {
3934 gcc_assert (df);
3935 return df->hard_regs_live_count[reg];
3936 }
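
/* Illustrative sketch (hypothetical helper, not part of the df API):
   combining the two queries above, e.g. to find hard registers that
   are referenced exactly once and might therefore be cheap to work
   around.  Assumes df is live, as both callees assert.  */

static bool
example_hard_reg_used_once_p (unsigned int reg)
{
  return df_hard_reg_used_p (reg) && df_hard_reg_used_count (reg) == 1;
}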
3937
3938
3939 /* Get the value of regs_ever_live[REGNO]. */
3940
3941 bool
3942 df_regs_ever_live_p (unsigned int regno)
3943 {
3944 return regs_ever_live[regno];
3945 }
3946
3947
3948 /* Set regs_ever_live[REGNO] to VALUE.  If this causes regs_ever_live
3949    to change, schedule that change for the next update. */
3950
3951 void
3952 df_set_regs_ever_live (unsigned int regno, bool value)
3953 {
3954 if (regs_ever_live[regno] == value)
3955 return;
3956
3957 regs_ever_live[regno] = value;
3958 if (df)
3959 df->redo_entry_and_exit = true;
3960 }
3961
3962
3963 /* Compute "regs_ever_live" information from the underlying df
3964 information. Set the vector to all false if RESET. */
3965
3966 void
3967 df_compute_regs_ever_live (bool reset)
3968 {
3969 unsigned int i;
3970 bool changed = df->redo_entry_and_exit;
3971
3972 if (reset)
3973 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3974
3975 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3976 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3977 {
3978 regs_ever_live[i] = true;
3979 changed = true;
3980 }
3981 if (changed)
3982 df_update_entry_exit_and_calls ();
3983 df->redo_entry_and_exit = false;
3984 }
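
/* Illustrative sketch (hypothetical) of the deferred-update flow:
   df_set_regs_ever_live only records the change and sets
   df->redo_entry_and_exit; a later call to df_compute_regs_ever_live
   performs the scheduled entry/exit update.  Assumes df is live.  */

static void
example_force_reg_ever_live (unsigned int regno)
{
  df_set_regs_ever_live (regno, true);
  /* Pass false so existing regs_ever_live bits are kept; only newly
     live registers are added before the scheduled update runs.  */
  df_compute_regs_ever_live (false);
}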
3985
3986 \f
3987 /*----------------------------------------------------------------------------
3988 Dataflow ref information verification functions.
3989
3990 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3991 df_reg_chain_verify_unmarked (refs)
3992 df_refs_verify (ref*, ref*, bool)
3993 df_mws_verify (mw*, mw*, bool)
3994 df_insn_refs_verify (collection_rec, bb, insn, bool)
3995 df_bb_refs_verify (bb, refs, bool)
3996 df_bb_verify (bb)
3997 df_exit_block_bitmap_verify (bool)
3998 df_entry_block_bitmap_verify (bool)
3999 df_scan_verify ()
4000 ----------------------------------------------------------------------------*/
4001
4002
4003 /* Mark all refs in the reg chain.  Verify that all of the refs are in
4004    the correct chain for REGNO, and return the number of refs marked. */
4005
4006 static unsigned int
4007 df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
4008 bool is_def, bool is_eq_use)
4009 {
4010 unsigned int count = 0;
4011 struct df_ref *ref;
4012 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4013 {
4014 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4015
4016 /* If there are no def-use or use-def chains, make sure that all
4017 of the chains are clear. */
4018 if (!df_chain)
4019 gcc_assert (!DF_REF_CHAIN (ref));
4020
4021 /* Check to make sure the ref is in the correct chain. */
4022 gcc_assert (DF_REF_REGNO (ref) == regno);
4023 if (is_def)
4024 gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
4025 else
4026 gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);
4027
4028 if (is_eq_use)
4029 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4030 else
4031 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4032
4033 if (ref->next_reg)
4034 gcc_assert (ref->next_reg->prev_reg == ref);
4035 count++;
4036 DF_REF_REG_MARK (ref);
4037 }
4038 return count;
4039 }
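
/* Illustrative sketch (hypothetical): the invariant df_reg_chain_mark
   checks can be stated as "walking the reg chain visits exactly
   DF_REG_*_COUNT refs".  The same walk, minus the assertions and the
   marking, is just a count.  */

static unsigned int
example_reg_chain_length (struct df_ref *refs)
{
  unsigned int count = 0;
  struct df_ref *ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    count++;
  return count;
}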
4040
4041
4042 /* Verify that all of the registers in the chain are unmarked. */
4043
4044 static void
4045 df_reg_chain_verify_unmarked (struct df_ref *refs)
4046 {
4047 struct df_ref *ref;
4048 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4049 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4050 }
4051
4052
4053 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4054
4055 static bool
4056 df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
4057 bool abort_if_fail)
4058 {
4059 while ((*new_rec) && (*old_rec))
4060 {
4061 if (!df_ref_equal_p (*new_rec, *old_rec))
4062 {
4063 if (abort_if_fail)
4064 gcc_assert (0);
4065 else
4066 return false;
4067 }
4068
4069 /* ABORT_IF_FAIL means we were called from the whole-function
4070    verifier; in that case, record that this ref was seen by unmarking it. */
4071 if (abort_if_fail)
4072 {
4073 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4074 DF_REF_REG_UNMARK (*old_rec);
4075 }
4076
4077 new_rec++;
4078 old_rec++;
4079 }
4080
4081 if (abort_if_fail)
4082 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4083 else
4084 return ((*new_rec == NULL) && (*old_rec == NULL));
4085 return false;
4086 }
4087
4088
4089 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4090
4091 static bool
4092 df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
4093 bool abort_if_fail)
4094 {
4095 while ((*new_rec) && (*old_rec))
4096 {
4097 if (!df_mw_equal_p (*new_rec, *old_rec))
4098 {
4099 if (abort_if_fail)
4100 gcc_assert (0);
4101 else
4102 return false;
4103 }
4104 new_rec++;
4105 old_rec++;
4106 }
4107
4108 if (abort_if_fail)
4109 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4110 else
4111 return ((*new_rec == NULL) && (*old_rec == NULL));
4112 return false;
4113 }
4114
4115
4116 /* Return true if the existing insn refs information for INSN is
4117    complete and correct.  Otherwise (i.e. if there are any missing or
4118    extra refs), the freshly collected refs are left in COLLECTION_REC.
4119 
4120    If ABORT_IF_FAIL, this is the whole-function verifier: the refs
4121    being verified must already be marked (by df_reg_chain_mark), and
4122    each ref that checks out has its mark cleared.  If it's false, this
4123    is per-insn verification and the marks are left alone.
4124 
4125    If ABORT_IF_FAIL is set, this function never returns false. */
4126
4127 static bool
4128 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4129 basic_block bb,
4130 rtx insn,
4131 bool abort_if_fail)
4132 {
4133 bool ret1, ret2, ret3, ret4;
4134 unsigned int uid = INSN_UID (insn);
4135
4136 df_insn_refs_collect (collection_rec, bb, insn);
4137
4138 if (!DF_INSN_UID_DEFS (uid))
4139 {
4140 /* The insn_rec was created but it was never filled out. */
4141 if (abort_if_fail)
4142 gcc_assert (0);
4143 else
4144 return false;
4145 }
4146
4147 /* Unfortunately we cannot opt out early if one of these is not
4148 right because the marks will not get cleared. */
4149 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4150 abort_if_fail);
4151 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4152 abort_if_fail);
4153 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4154 abort_if_fail);
4155 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4156 abort_if_fail);
4157 return (ret1 && ret2 && ret3 && ret4);
4158 }
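
/* Illustrative sketch (hypothetical): using df_insn_refs_verify in its
   per-insn mode, with ABORT_IF_FAIL false so a mismatch is reported
   rather than asserted.  The scratch vector sizes mirror the ones in
   df_bb_verify below and are assumptions, not part of the API.  */

static bool
example_insn_refs_ok_p (basic_block bb, rtx insn)
{
  struct df_collection_rec collection_rec;
  bool ok;

  memset (&collection_rec, 0, sizeof (struct df_collection_rec));
  collection_rec.def_vec = alloca (sizeof (struct df_ref *) * 1000);
  collection_rec.use_vec = alloca (sizeof (struct df_ref *) * 1000);
  collection_rec.eq_use_vec = alloca (sizeof (struct df_ref *) * 1000);
  collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg *) * 100);

  ok = df_insn_refs_verify (&collection_rec, bb, insn, false);
  df_free_collection_rec (&collection_rec);
  return ok;
}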
4159
4160
4161 /* Return true if all refs in the basic block are correct and complete.
4162    As a side effect of df_refs_verify, every ref that is verified
4163    has its DF_REF_MARK bit cleared. */
4164
4165 static bool
4166 df_bb_verify (basic_block bb)
4167 {
4168 rtx insn;
4169 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4170 struct df_collection_rec collection_rec;
4171
4172 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
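  /* Descriptive note (added): fixed-size scratch vectors; the sizes are
     assumed upper bounds on the number of refs a single insn or block
     can produce.  */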
4173 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
4174 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
4175 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
4176 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
4177
4178 gcc_assert (bb_info);
4179
4180 /* Scan the block, an insn at a time, from end to beginning. */
4181 FOR_BB_INSNS_REVERSE (bb, insn)
4182 {
4183 if (!INSN_P (insn))
4184 continue;
4185 df_insn_refs_verify (&collection_rec, bb, insn, true);
4186 df_free_collection_rec (&collection_rec);
4187 }
4188
4189 /* Do the artificial defs and uses. */
4190 df_bb_refs_collect (&collection_rec, bb);
4191 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4192 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4193 df_free_collection_rec (&collection_rec);
4194
4195 return true;
4196 }
4197
4198
4199 /* Returns true if the entry block has correct and complete df_ref set.
4200 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4201
4202 static bool
4203 df_entry_block_bitmap_verify (bool abort_if_fail)
4204 {
4205 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4206 bool is_eq;
4207
4208 df_get_entry_block_def_set (entry_block_defs);
4209
4210 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4211
4212 if (!is_eq && abort_if_fail)
4213 {
4214 print_current_pass (stderr);
4215 fprintf (stderr, "entry_block_defs = ");
4216 df_print_regset (stderr, entry_block_defs);
4217 fprintf (stderr, "df->entry_block_defs = ");
4218 df_print_regset (stderr, df->entry_block_defs);
4219 gcc_assert (0);
4220 }
4221
4222 BITMAP_FREE (entry_block_defs);
4223
4224 return is_eq;
4225 }
4226
4227
4228 /* Returns true if the exit block has correct and complete df_ref set.
4229 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4230
4231 static bool
4232 df_exit_block_bitmap_verify (bool abort_if_fail)
4233 {
4234 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4235 bool is_eq;
4236
4237 df_get_exit_block_use_set (exit_block_uses);
4238
4239 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4240
4241 if (!is_eq && abort_if_fail)
4242 {
4243 print_current_pass (stderr);
4244 fprintf (stderr, "exit_block_uses = ");
4245 df_print_regset (stderr, exit_block_uses);
4246 fprintf (stderr, "df->exit_block_uses = ");
4247 df_print_regset (stderr, df->exit_block_uses);
4248 gcc_assert (0);
4249 }
4250
4251 BITMAP_FREE (exit_block_uses);
4252
4253 return is_eq;
4254 }
4255
4256
4257 /* Verify that the df_ref information for all insns in all blocks is
4258    correct and complete, aborting via gcc_assert on the first
4259    inconsistency found. */
4260
4261 void
4262 df_scan_verify (void)
4263 {
4264 unsigned int i;
4265 basic_block bb;
4266 bitmap regular_block_artificial_uses;
4267 bitmap eh_block_artificial_uses;
4268
4269 if (!df)
4270 return;
4271
4272 /* This is a hack, but a necessary one. If you do not do this,
4273 insn_attrtab can never be compiled in a bootstrap. This
4274 verification is just too expensive. */
4275 if (n_basic_blocks > 250)
4276 return;
4277
4278 /* Verification is a 4 step process. */
4279
4280 /* (1) All of the refs are marked by going through the reg chains. */
4281 for (i = 0; i < DF_REG_SIZE (df); i++)
4282 {
4283 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4284 == DF_REG_DEF_COUNT(i));
4285 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4286 == DF_REG_USE_COUNT(i));
4287 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4288 == DF_REG_EQ_USE_COUNT(i));
4289 }
4290
4291 /* (2) There are various bitmaps whose value may change over the
4292 course of the compilation. This step recomputes them to make
4293 sure that they have not slipped out of date. */
4294 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4295 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4296
4297 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4298 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4299
4300 bitmap_ior_into (eh_block_artificial_uses,
4301 regular_block_artificial_uses);
4302
4303 /* Check that the artificial_uses bitmaps did not change. */
4304 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4305 df->regular_block_artificial_uses));
4306 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4307 df->eh_block_artificial_uses));
4308
4309 BITMAP_FREE (regular_block_artificial_uses);
4310 BITMAP_FREE (eh_block_artificial_uses);
4311
4312 /* Verify the entry block and exit block.  These only verify the
4313    bitmaps; the refs themselves are verified in df_bb_verify. */
4314 df_entry_block_bitmap_verify (true);
4315 df_exit_block_bitmap_verify (true);
4316
4317 /* (3) All of the insns in all of the blocks are traversed and the
4318 marks are cleared both in the artificial refs attached to the
4319 blocks and the real refs inside the insns. It is a failure to
4320 clear a mark that has not been set as this means that the ref in
4321 the block or insn was not in the reg chain. */
4322
4323 FOR_ALL_BB (bb)
4324 df_bb_verify (bb);
4325
4326 /* (4) All of the reg chains are traversed a second time.  This time
4327    a check is made that the marks are clear.  A set mark would come
4328    from a ref that is not in any insn or basic block. */
4329
4330 for (i = 0; i < DF_REG_SIZE (df); i++)
4331 {
4332 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4333 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4334 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4335 }
4336 }
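
/* Illustrative sketch (hypothetical hook, not part of the df API): a
   pass that rewrites insns and rescans them could call this when it
   finishes, to confirm its bookkeeping kept the scanned refs
   consistent.  The ENABLE_CHECKING guard follows GCC's convention for
   expensive consistency checks.  */

static void
example_df_checking_hook (void)
{
#ifdef ENABLE_CHECKING
  df_scan_verify ();
#endif
}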