]> gcc.gnu.org Git - gcc.git/blame - gcc/passes.c
Fixed some stack size allocation in G5's mixed mode.
[gcc.git] / gcc / passes.c
CommitLineData
f6db1481
RH
1/* Top level of GCC compilers (cc1, cc1plus, etc.)
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
21
22/* This is the top level of cc1/c++.
23 It parses command args, opens files, invokes the various passes
24 in the proper order, and counts the time used by each.
25 Error messages and low-level interface to malloc also handled here. */
26
27#include "config.h"
28#undef FLOAT /* This is for hpux. They should change hpux. */
29#undef FFS /* Some systems define this in param.h. */
30#include "system.h"
31#include "coretypes.h"
32#include "tm.h"
33#include <signal.h>
34
35#ifdef HAVE_SYS_RESOURCE_H
36# include <sys/resource.h>
37#endif
38
39#ifdef HAVE_SYS_TIMES_H
40# include <sys/times.h>
41#endif
42
43#include "line-map.h"
44#include "input.h"
45#include "tree.h"
46#include "rtl.h"
47#include "tm_p.h"
48#include "flags.h"
49#include "insn-attr.h"
50#include "insn-config.h"
51#include "insn-flags.h"
52#include "hard-reg-set.h"
53#include "recog.h"
54#include "output.h"
55#include "except.h"
56#include "function.h"
57#include "toplev.h"
58#include "expr.h"
59#include "basic-block.h"
60#include "intl.h"
61#include "ggc.h"
62#include "graph.h"
63#include "loop.h"
64#include "regs.h"
65#include "timevar.h"
66#include "diagnostic.h"
67#include "params.h"
68#include "reload.h"
69#include "dwarf2asm.h"
70#include "integrate.h"
71#include "real.h"
72#include "debug.h"
73#include "target.h"
74#include "langhooks.h"
75#include "cfglayout.h"
76#include "cfgloop.h"
77#include "hosthooks.h"
78#include "cgraph.h"
79#include "opts.h"
80#include "coverage.h"
81#include "value-prof.h"
82#include "alloc-pool.h"
83
84#if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
85#include "dwarf2out.h"
86#endif
87
88#if defined(DBX_DEBUGGING_INFO) || defined(XCOFF_DEBUGGING_INFO)
89#include "dbxout.h"
90#endif
91
92#ifdef SDB_DEBUGGING_INFO
93#include "sdbout.h"
94#endif
95
96#ifdef XCOFF_DEBUGGING_INFO
97#include "xcoffout.h" /* Needed for external data
98 declarations for e.g. AIX 4.x. */
99#endif
100
101#ifndef HAVE_conditional_execution
102#define HAVE_conditional_execution 0
103#endif
104
105/* Format to use to print dumpfile index value */
106#ifndef DUMPFILE_FORMAT
107#define DUMPFILE_FORMAT ".%02d."
108#endif
109
/* Describes a dump file.  One entry per pass; see dump_file_tbl below.  */

struct dump_file_info
{
  /* The unique extension to apply, e.g. ".jump".  */
  const char *const extension;

  /* The -d<c> character that enables this dump file.  Note that some
     entries deliberately share a switch character.  */
  char const debug_switch;

  /* True if there is a corresponding graph dump file.  */
  char const graph_dump_p;

  /* True if the user selected this dump.  */
  char enabled;

  /* True if the files have been initialized (i.e. truncated).  */
  char initialized;
};
129
/* Enumerate the extant dump files.  The order here is the pass order,
   and it must be kept in sync with dump_file_tbl below, since the enum
   value is used to index that table (and to build the numbered dump
   file name via DUMPFILE_FORMAT).  */

enum dump_file_index
{
  DFI_cgraph,
  DFI_rtl,
  DFI_sibling,
  DFI_eh,
  DFI_jump,
  DFI_null,
  DFI_cse,
  DFI_addressof,
  DFI_gcse,
  DFI_loop,
  DFI_bypass,
  DFI_cfg,
  DFI_bp,
  DFI_vpt,
  DFI_ce1,
  DFI_tracer,
  DFI_loop2,
  DFI_web,
  DFI_cse2,
  DFI_life,
  DFI_combine,
  DFI_ce2,
  DFI_regmove,
  DFI_sched,
  DFI_lreg,
  DFI_greg,
  DFI_postreload,
  DFI_gcse2,
  DFI_flow2,
  DFI_peephole2,
  DFI_ce3,
  DFI_rnreg,
  DFI_bbro,
  DFI_branch_target_load,
  DFI_sched2,
  DFI_stack,
  DFI_vartrack,
  DFI_mach,
  DFI_dbr,
  DFI_MAX       /* Number of dump files; not itself a dump.  */
};
175
/* Describes all the dump files.  Should be kept in order of the
   pass and in sync with dump_file_index above.

   Remaining -d letters:

	"          e        m   q         "
	"         K   O Q     WXY "
*/

static struct dump_file_info dump_file_tbl[DFI_MAX] =
{
  { "cgraph",	'U', 0, 0, 0 },
  { "rtl",	'r', 0, 0, 0 },
  { "sibling",  'i', 0, 0, 0 },
  { "eh",	'h', 0, 0, 0 },
  { "jump",	'j', 0, 0, 0 },
  { "null",	'u', 0, 0, 0 },
  { "cse",	's', 0, 0, 0 },
  { "addressof", 'F', 0, 0, 0 },
  { "gcse",	'G', 1, 0, 0 },
  { "loop",	'L', 1, 0, 0 },
  { "bypass",   'G', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "cfg",	'f', 1, 0, 0 },
  { "bp",	'b', 1, 0, 0 },
  { "vpt",	'V', 1, 0, 0 },
  { "ce1",	'C', 1, 0, 0 },
  { "tracer",	'T', 1, 0, 0 },
  { "loop2",	'L', 1, 0, 0 },
  { "web",      'Z', 0, 0, 0 },
  { "cse2",	't', 1, 0, 0 },
  { "life",	'f', 1, 0, 0 },	/* Yes, duplicate enable switch.  */
  { "combine",	'c', 1, 0, 0 },
  { "ce2",	'C', 1, 0, 0 },
  { "regmove",	'N', 1, 0, 0 },
  { "sched",	'S', 1, 0, 0 },
  { "lreg",	'l', 1, 0, 0 },
  { "greg",	'g', 1, 0, 0 },
  { "postreload", 'o', 1, 0, 0 },
  { "gcse2",	'J', 0, 0, 0 },
  { "flow2",	'w', 1, 0, 0 },
  { "peephole2", 'z', 1, 0, 0 },
  { "ce3",	'E', 1, 0, 0 },
  { "rnreg",	'n', 1, 0, 0 },
  { "bbro",	'B', 1, 0, 0 },
  { "btl",	'd', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "sched2",	'R', 1, 0, 0 },
  { "stack",	'k', 1, 0, 0 },
  { "vartrack",	'V', 1, 0, 0 }, /* Yes, duplicate enable switch.  */
  { "mach",	'M', 1, 0, 0 },
  { "dbr",	'd', 0, 0, 0 },
};
227
/* Routine to open a dump file.  Return true if the dump file is enabled.

   On success the global dump_file points at the newly opened stream (any
   previously open dump_file is closed first).  The file is truncated the
   first time a given INDEX is opened in this compilation, and appended to
   thereafter.  If DECL is nonnull, a ";; Function" banner naming DECL is
   written.  Time spent here is charged to TV_DUMP.  */

static int
open_dump_file (enum dump_file_index index, tree decl)
{
  char *dump_name;
  const char *open_arg;
  char seq[16];

  if (! dump_file_tbl[index].enabled)
    return 0;

  timevar_push (TV_DUMP);
  if (dump_file != NULL)
    fclose (dump_file);

  /* Build the numbered infix, e.g. ".14." for DUMPFILE_FORMAT ".%02d.".  */
  sprintf (seq, DUMPFILE_FORMAT, index);

  if (! dump_file_tbl[index].initialized)
    {
      /* If we've not initialized the files, do so now.  */
      if (graph_dump_format != no_graph
	  && dump_file_tbl[index].graph_dump_p)
	{
	  dump_name = concat (seq, dump_file_tbl[index].extension, NULL);
	  clean_graph_dump_file (dump_base_name, dump_name);
	  free (dump_name);
	}
      dump_file_tbl[index].initialized = 1;
      open_arg = "w";
    }
  else
    open_arg = "a";

  dump_name = concat (dump_base_name, seq,
		      dump_file_tbl[index].extension, NULL);

  dump_file = fopen (dump_name, open_arg);
  if (dump_file == NULL)
    fatal_error ("can't open %s: %m", dump_name);

  free (dump_name);

  if (decl)
    fprintf (dump_file, "\n;; Function %s%s\n\n",
	     lang_hooks.decl_printable_name (decl, 2),
	     cfun->function_frequency == FUNCTION_FREQUENCY_HOT
	     ? " (hot)"
	     : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
	     ? " (unlikely executed)"
	     : "");

  timevar_pop (TV_DUMP);
  return 1;
}
283
/* Routine to close a dump file.  A no-op when no dump_file is open.

   If INSNS is nonnull and the pass has a graph dump enabled, a graph
   dump is emitted as well.  If FUNC is nonnull and INSNS is nonnull,
   FUNC is called to print INSNS into the dump before closing.  Clears
   the global dump_file.  Time spent here is charged to TV_DUMP.  */

static void
close_dump_file (enum dump_file_index index,
		 void (*func) (FILE *, rtx),
		 rtx insns)
{
  if (! dump_file)
    return;

  timevar_push (TV_DUMP);
  if (insns
      && graph_dump_format != no_graph
      && dump_file_tbl[index].graph_dump_p)
    {
      char seq[16];
      char *suffix;

      sprintf (seq, DUMPFILE_FORMAT, index);
      suffix = concat (seq, dump_file_tbl[index].extension, NULL);
      print_rtl_graph_with_bb (dump_base_name, suffix, insns);
      free (suffix);
    }

  if (func && insns)
    func (dump_file, insns);

  fflush (dump_file);
  fclose (dump_file);

  dump_file = NULL;
  timevar_pop (TV_DUMP);
}
317
/* This is called from various places for FUNCTION_DECL, VAR_DECL,
   and TYPE_DECL nodes.

   This does nothing for local (non-static) variables, unless the
   variable is a register variable with an ASMSPEC.  In that case, or
   if the variable is not an automatic, it sets up the RTL and
   outputs any assembler code (label definition, storage allocation
   and initialization).

   DECL is the declaration.  If ASMSPEC is nonzero, it specifies
   the assembler symbol name to be used.  TOP_LEVEL is nonzero
   if this declaration is not within a function.  AT_END is nonzero
   when called at the end of compilation for deferred tentative
   definitions.  */

void
rest_of_decl_compilation (tree decl,
			  const char *asmspec,
			  int top_level,
			  int at_end)
{
  /* We deferred calling assemble_alias so that we could collect
     other attributes such as visibility.  Emit the alias now.  */
  {
    tree alias;
    alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
    if (alias)
      {
	alias = TREE_VALUE (TREE_VALUE (alias));
	alias = get_identifier (TREE_STRING_POINTER (alias));
	assemble_alias (decl, alias);
      }
  }

  /* Forward declarations for nested functions are not "external",
     but we need to treat them as if they were.  */
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
      || TREE_CODE (decl) == FUNCTION_DECL)
    {
      timevar_push (TV_VARCONST);

      if (asmspec)
	make_decl_rtl (decl, asmspec);

      /* Don't output anything when a tentative file-scope definition
	 is seen.  But at end of compilation, do output code for them.

	 We do output all variables when unit-at-a-time is active and rely on
	 callgraph code to defer them except for forward declarations
	 (see gcc.c-torture/compile/920624-1.c) */
      if ((at_end
	   || !DECL_DEFER_OUTPUT (decl)
	   || (flag_unit_at_a_time && DECL_INITIAL (decl)))
	  && !DECL_EXTERNAL (decl))
	{
	  /* In unit-at-a-time mode, let the callgraph decide when (and
	     whether) to emit the variable; otherwise emit it now.  */
	  if (flag_unit_at_a_time && !cgraph_global_info_ready
	      && TREE_CODE (decl) != FUNCTION_DECL && top_level)
	    cgraph_varpool_finalize_decl (decl);
	  else
	    assemble_variable (decl, top_level, at_end, 0);
	}

#ifdef ASM_FINISH_DECLARE_OBJECT
      if (decl == last_assemble_variable_decl)
	{
	  ASM_FINISH_DECLARE_OBJECT (asm_out_file, decl,
				     top_level, at_end);
	}
#endif

      timevar_pop (TV_VARCONST);
    }
  else if (DECL_REGISTER (decl) && asmspec != 0)
    {
      /* Explicit hard-register variable: validate the register name
	 and (re)build the decl's RTL accordingly.  */
      if (decode_reg_name (asmspec) >= 0)
	{
	  SET_DECL_RTL (decl, NULL_RTX);
	  make_decl_rtl (decl, asmspec);
	}
      else
	{
	  error ("invalid register name `%s' for register variable", asmspec);
	  DECL_REGISTER (decl) = 0;
	  if (!top_level)
	    expand_decl (decl);
	}
    }
  else if (TREE_CODE (decl) == TYPE_DECL)
    {
      timevar_push (TV_SYMOUT);
      debug_hooks->type_decl (decl, !top_level);
      timevar_pop (TV_SYMOUT);
    }
}
410
411/* Called after finishing a record, union or enumeral type. */
412
413void
414rest_of_type_compilation (tree type, int toplev)
415{
416 /* Avoid confusing the debug information machinery when there are
417 errors. */
418 if (errorcount != 0 || sorrycount != 0)
419 return;
420
421 timevar_push (TV_SYMOUT);
422 debug_hooks->type_decl (TYPE_STUB_DECL (type), !toplev);
423 timevar_pop (TV_SYMOUT);
424}
425
/* Turn the RTL into assembly.  DECL is the function being compiled;
   INSNS is its insn chain.  Writes to asm_out_file and releases the
   flow/regset memory for the function.  Charged to TV_FINAL.  */
static void
rest_of_handle_final (tree decl, rtx insns)
{
  timevar_push (TV_FINAL);
  {
    rtx x;
    const char *fnname;

    /* Get the function's name, as described by its RTL.  This may be
       different from the DECL_NAME name used in the source file.  */

    x = DECL_RTL (decl);
    if (GET_CODE (x) != MEM)
      abort ();
    x = XEXP (x, 0);
    if (GET_CODE (x) != SYMBOL_REF)
      abort ();
    fnname = XSTR (x, 0);

    assemble_start_function (decl, fnname);
    final_start_function (insns, asm_out_file, optimize);
    final (insns, asm_out_file, optimize, 0);
    final_end_function ();

#ifdef IA64_UNWIND_INFO
    /* ??? The IA-64 ".handlerdata" directive must be issued before
       the ".endp" directive that closes the procedure descriptor.  */
    output_function_exception_table ();
#endif

    assemble_end_function (decl, fnname);

#ifndef IA64_UNWIND_INFO
    /* Otherwise, it feels unclean to switch sections in the middle.  */
    output_function_exception_table ();
#endif

    if (! quiet_flag)
      fflush (asm_out_file);

    /* Release all memory allocated by flow.  */
    free_basic_block_vars (0);

    /* Release all memory held by regsets now.  */
    regset_release_memory ();
  }
  timevar_pop (TV_FINAL);

  ggc_collect ();
}
477
478#ifdef DELAY_SLOTS
/* Run delay slot optimization.  Dumps to the "dbr" file when enabled;
   charged to TV_DBR_SCHED.  Only built when the target defines
   DELAY_SLOTS.  */
static void
rest_of_handle_delay_slots (tree decl, rtx insns)
{
  timevar_push (TV_DBR_SCHED);
  open_dump_file (DFI_dbr, decl);

  dbr_schedule (insns, dump_file);

  close_dump_file (DFI_dbr, print_rtl, insns);
  timevar_pop (TV_DBR_SCHED);

  ggc_collect ();
}
493#endif
494
495#ifdef STACK_REGS
/* Convert register usage from flat register file usage to a stack
   register file.  Only built for STACK_REGS targets (e.g. x87 FP).
   If reg_to_stack changed anything and we are optimizing, clean up
   the CFG and possibly reorder blocks.  Charged to TV_REG_STACK.  */
static void
rest_of_handle_stack_regs (tree decl, rtx insns)
{
#if defined (HAVE_ATTR_length)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
#ifdef INSN_SCHEDULING
  if (optimize && !flag_schedule_insns_after_reload)
#else
  if (optimize)
#endif
    {
      timevar_push (TV_SHORTEN_BRANCH);
      split_all_insns (1);
      timevar_pop (TV_SHORTEN_BRANCH);
    }
#endif

  timevar_push (TV_REG_STACK);
  open_dump_file (DFI_stack, decl);

  if (reg_to_stack (insns, dump_file) && optimize)
    {
      if (cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK
		       | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0))
	  && flag_reorder_blocks)
	{
	  reorder_basic_blocks ();
	  cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_POST_REGSTACK);
	}
    }

  close_dump_file (DFI_stack, print_rtl_with_bb, insns);
  timevar_pop (TV_REG_STACK);

  ggc_collect ();
}
537#endif
538
/* Track the variables, i.e. compute where the variable is stored at
   each position in the function.  Charged to TV_VAR_TRACKING; dumps
   to the "vartrack" file when enabled.  */
static void
rest_of_handle_variable_tracking (tree decl, rtx insns)
{
  timevar_push (TV_VAR_TRACKING);
  open_dump_file (DFI_vartrack, decl);

  variable_tracking_main ();

  close_dump_file (DFI_vartrack, print_rtl_with_bb, insns);
  timevar_pop (TV_VAR_TRACKING);
}
551
552/* Machine independent reorg pass. */
553static void
554rest_of_handle_machine_reorg (tree decl, rtx insns)
555{
556 timevar_push (TV_MACH_DEP);
557 open_dump_file (DFI_mach, decl);
558
559 (*targetm.machine_dependent_reorg) ();
560
561 close_dump_file (DFI_mach, print_rtl, insns);
562 timevar_pop (TV_MACH_DEP);
563
564 ggc_collect ();
565}
566
567
/* Run new register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return (i.e. reload failed).

   NOTE(review): this expects the caller to have already pushed
   TV_LOCAL_ALLOC and opened the DFI_lreg dump — this function only
   pops/closes them.  Verify against the caller in
   rest_of_compilation.  */
static bool
rest_of_handle_new_regalloc (tree decl, rtx insns)
{
  int failure;

  delete_trivially_dead_insns (insns, max_reg_num ());
  reg_alloc ();

  timevar_pop (TV_LOCAL_ALLOC);
  if (dump_file_tbl[DFI_lreg].enabled)
    {
      timevar_push (TV_DUMP);

      close_dump_file (DFI_lreg, NULL, NULL);
      timevar_pop (TV_DUMP);
    }

  /* XXX clean up the whole mess to bring live info in shape again.  */
  timevar_push (TV_GLOBAL_ALLOC);
  open_dump_file (DFI_greg, decl);

  build_insn_chain (insns);
  failure = reload (insns, 0);

  timevar_pop (TV_GLOBAL_ALLOC);

  if (dump_file_tbl[DFI_greg].enabled)
    {
      timevar_push (TV_DUMP);

      dump_global_regs (dump_file);

      close_dump_file (DFI_greg, print_rtl_with_bb, insns);
      timevar_pop (TV_DUMP);
    }

  if (failure)
    return true;

  reload_completed = 1;

  return false;
}
613
/* Run old register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return (reload/global_alloc failure).

   NOTE(review): like rest_of_handle_new_regalloc, this pops
   TV_LOCAL_ALLOC and closes DFI_lreg that the caller opened.  */
static bool
rest_of_handle_old_regalloc (tree decl, rtx insns)
{
  int failure;
  int rebuild_notes;

  /* Allocate the reg_renumber array.  */
  allocate_reg_info (max_regno, FALSE, TRUE);

  /* And the reg_equiv_memory_loc array.  */
  reg_equiv_memory_loc = xcalloc (max_regno, sizeof (rtx));

  allocate_initial_values (reg_equiv_memory_loc);

  regclass (insns, max_reg_num (), dump_file);
  rebuild_notes = local_alloc ();

  timevar_pop (TV_LOCAL_ALLOC);

  /* Local allocation may have turned an indirect jump into a direct
     jump.  If so, we must rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_notes)
    {
      timevar_push (TV_JUMP);

      rebuild_jump_labels (insns);
      purge_all_dead_edges (0);

      timevar_pop (TV_JUMP);
    }

  if (dump_file_tbl[DFI_lreg].enabled)
    {
      timevar_push (TV_DUMP);

      dump_flow_info (dump_file);
      dump_local_alloc (dump_file);

      close_dump_file (DFI_lreg, print_rtl_with_bb, insns);
      timevar_pop (TV_DUMP);
    }

  ggc_collect ();

  timevar_push (TV_GLOBAL_ALLOC);
  open_dump_file (DFI_greg, decl);

  /* If optimizing, allocate remaining pseudo-regs.  Do the reload
     pass fixing up any insns that are invalid.  */

  if (optimize)
    failure = global_alloc (dump_file);
  else
    {
      build_insn_chain (insns);
      failure = reload (insns, 0);
    }

  timevar_pop (TV_GLOBAL_ALLOC);

  if (dump_file_tbl[DFI_greg].enabled)
    {
      timevar_push (TV_DUMP);

      dump_global_regs (dump_file);

      close_dump_file (DFI_greg, print_rtl_with_bb, insns);
      timevar_pop (TV_DUMP);
    }

  return failure;
}
689
/* Run the regrename and cprop passes, each gated on its own flag.
   Both share the "rnreg" dump file and TV_RENAME_REGISTERS timer.  */
static void
rest_of_handle_regrename (tree decl, rtx insns)
{
  timevar_push (TV_RENAME_REGISTERS);
  open_dump_file (DFI_rnreg, decl);

  if (flag_rename_registers)
    regrename_optimize ();
  if (flag_cprop_registers)
    copyprop_hardreg_forward ();

  close_dump_file (DFI_rnreg, print_rtl_with_bb, insns);
  timevar_pop (TV_RENAME_REGISTERS);
}
705
/* Reorder basic blocks.  Also runs the tracer and a final CFG
   cleanup when the corresponding flags are set.  Dumps to "bbro".  */
static void
rest_of_handle_reorder_blocks (tree decl, rtx insns)
{
  bool changed;
  open_dump_file (DFI_bbro, decl);

  /* Last attempt to optimize CFG, as scheduling, peepholing and insn
     splitting possibly introduced more crossjumping opportunities.  */
  changed = cleanup_cfg (CLEANUP_EXPENSIVE
			 | (!HAVE_conditional_execution
			    ? CLEANUP_UPDATE_LIFE : 0));

  if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
    tracer ();
  if (flag_reorder_blocks)
    reorder_basic_blocks ();
  if (flag_reorder_blocks
      || (flag_sched2_use_traces && flag_schedule_insns_after_reload))
    changed |= cleanup_cfg (CLEANUP_EXPENSIVE
			    | (!HAVE_conditional_execution
			       ? CLEANUP_UPDATE_LIFE : 0));

  /* On conditional execution targets we can not update the life cheaply,
     so we defer the updating to after both cleanups.  This may lose some
     cases but should not be terribly bad.  */
  if (changed && HAVE_conditional_execution)
    update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
		      PROP_DEATH_NOTES);
  close_dump_file (DFI_bbro, print_rtl_with_bb, insns);
}
737
738#ifdef INSN_SCHEDULING
/* Run instruction scheduler (first scheduling pass, before register
   allocation).  Only does anything when optimizing with
   -fschedule-insns.  Charged to TV_SCHED; dumps to "sched".  */
static void
rest_of_handle_sched (tree decl, rtx insns)
{
  timevar_push (TV_SCHED);

  /* Print function header into sched dump now
     because doing the sched analysis makes some of the dump.  */
  if (optimize > 0 && flag_schedule_insns)
    {
      open_dump_file (DFI_sched, decl);

      /* Do control and data sched analysis,
	 and write some of the results to dump file.  */

      schedule_insns (dump_file);

      close_dump_file (DFI_sched, print_rtl_with_bb, insns);
    }
  timevar_pop (TV_SCHED);

  ggc_collect ();
}
762
/* Run second scheduling pass after reload.  Uses extended-basic-block
   or trace scheduling when the corresponding flags are set, otherwise
   plain block scheduling.  Charged to TV_SCHED2; dumps to "sched2".  */
static void
rest_of_handle_sched2 (tree decl, rtx insns)
{
  timevar_push (TV_SCHED2);
  open_dump_file (DFI_sched2, decl);

  /* Do control and data sched analysis again,
     and write some more of the results to dump file.  */

  split_all_insns (1);

  if (flag_sched2_use_superblocks || flag_sched2_use_traces)
    {
      schedule_ebbs (dump_file);
      /* No liveness updating code yet, but it should be easy to do.
	 reg-stack recomputes the liveness when needed for now.  */
      count_or_remove_death_notes (NULL, 1);
      cleanup_cfg (CLEANUP_EXPENSIVE);
    }
  else
    schedule_insns (dump_file);

  close_dump_file (DFI_sched2, print_rtl_with_bb, insns);
  timevar_pop (TV_SCHED2);

  ggc_collect ();
}
791#endif
792
f9957958
MH
/* Run global common subexpression elimination after reload, then
   rebuild jump labels and remove trivially dead insns.  Dumps to
   "gcse2".  NOTE(review): unlike its siblings this pass pushes no
   timevar — confirm whether that is intentional.  */
static void
rest_of_handle_gcse2 (tree decl, rtx insns)
{
  open_dump_file (DFI_gcse2, decl);

  gcse_after_reload_main (insns, dump_file);
  rebuild_jump_labels (insns);
  delete_trivially_dead_insns (insns, max_reg_num ());
  close_dump_file (DFI_gcse2, print_rtl_with_bb, insns);

  ggc_collect ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
809
f6db1481
RH
/* Register allocation pre-pass, to reduce number of moves necessary
   for two-address machines.  Charged to TV_REGMOVE; dumps to
   "regmove".  */
static void
rest_of_handle_regmove (tree decl, rtx insns)
{
  timevar_push (TV_REGMOVE);
  open_dump_file (DFI_regmove, decl);

  regmove_optimize (insns, max_reg_num (), dump_file);

  cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
  close_dump_file (DFI_regmove, print_rtl_with_bb, insns);
  timevar_pop (TV_REGMOVE);

  ggc_collect ();
}
826
/* Run tracer (superblock formation by tail duplication), then clean
   up the CFG and rescan registers.  Dumps to "tracer".  */
static void
rest_of_handle_tracer (tree decl, rtx insns)
{
  open_dump_file (DFI_tracer, decl);
  if (dump_file)
    dump_flow_info (dump_file);
  tracer ();
  cleanup_cfg (CLEANUP_EXPENSIVE);
  /* Tracer duplicates insns, so register usage info must be rebuilt.  */
  reg_scan (insns, max_reg_num (), 0);
  close_dump_file (DFI_tracer, print_rtl_with_bb, get_insns ());
}
839
/* If-conversion and CFG cleanup.  The if-conversion proper is gated
   on flag_if_conversion; the trailing CFG cleanup always runs.
   Dumps to "ce1".  */
static void
rest_of_handle_if_conversion (tree decl, rtx insns)
{
  open_dump_file (DFI_ce1, decl);
  if (flag_if_conversion)
    {
      timevar_push (TV_IFCVT);
      if (dump_file)
	dump_flow_info (dump_file);
      cleanup_cfg (CLEANUP_EXPENSIVE);
      reg_scan (insns, max_reg_num (), 0);
      if_convert (0);
      timevar_pop (TV_IFCVT);
    }
  timevar_push (TV_JUMP);
  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (insns, max_reg_num (), 0);
  timevar_pop (TV_JUMP);
  close_dump_file (DFI_ce1, print_rtl_with_bb, get_insns ());
}
861
/* Rerun if-conversion, as combine may have simplified things enough
   to now meet sequence length restrictions.  Temporarily permits the
   creation of new pseudos around if_convert.  Charged to TV_IFCVT;
   dumps to "ce2".  */
static void
rest_of_handle_if_after_combine (tree decl, rtx insns)
{
  timevar_push (TV_IFCVT);
  open_dump_file (DFI_ce2, decl);

  no_new_pseudos = 0;
  if_convert (1);
  no_new_pseudos = 1;

  close_dump_file (DFI_ce2, print_rtl_with_bb, insns);
  timevar_pop (TV_IFCVT);
}
877
/* Run the web pass: split pseudos with independent live ranges into
   separate registers, then remove dead insns and clean up the CFG.
   Charged to TV_WEB; dumps to "web".  */
static void
rest_of_handle_web (tree decl, rtx insns)
{
  open_dump_file (DFI_web, decl);
  timevar_push (TV_WEB);
  web_main ();
  delete_trivially_dead_insns (insns, max_reg_num ());
  cleanup_cfg (CLEANUP_EXPENSIVE);

  timevar_pop (TV_WEB);
  close_dump_file (DFI_web, print_rtl_with_bb, insns);
  /* Webizing created new pseudos; rebuild register usage info.  */
  reg_scan (get_insns (), max_reg_num (), 0);
}
891
/* Do branch profiling and static profile estimation passes.
   Charged to TV_BRANCH_PROB; dumps to "bp".  */
static void
rest_of_handle_branch_prob (tree decl, rtx insns)
{
  struct loops loops;

  timevar_push (TV_BRANCH_PROB);
  open_dump_file (DFI_bp, decl);

  if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
    branch_prob ();

  /* Discover and record the loop depth at the head of each basic
     block.  The loop infrastructure does the real job for us.  */
  flow_loops_find (&loops, LOOP_TREE);

  if (dump_file)
    flow_loops_dump (&loops, dump_file, NULL, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (flag_guess_branch_prob)
    estimate_probability (&loops);

  flow_loops_free (&loops);
  free_dominance_info (CDI_DOMINATORS);
  close_dump_file (DFI_bp, print_rtl_with_bb, insns);
  timevar_pop (TV_BRANCH_PROB);
}
920
/* Do optimizations based on expression value profiles.  Cleans up
   the CFG only if a transformation was applied.  Charged to TV_VPT;
   dumps to "vpt".  */
static void
rest_of_handle_value_profile_transformations (tree decl, rtx insns)
{
  open_dump_file (DFI_vpt, decl);
  timevar_push (TV_VPT);

  if (value_profile_transformations ())
    cleanup_cfg (CLEANUP_EXPENSIVE);

  timevar_pop (TV_VPT);
  close_dump_file (DFI_vpt, print_rtl_with_bb, insns);
}
934
/* Do control and data flow analysis; write some of the results to the
   dump file ("cfg").  When optimizing, also cleans up the CFG and may
   mark the function constant.  */
static void
rest_of_handle_cfg (tree decl, rtx insns)
{
  open_dump_file (DFI_cfg, decl);
  if (dump_file)
    dump_flow_info (dump_file);
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE
		 | (flag_thread_jumps ? CLEANUP_THREADING : 0));

  /* It may make more sense to mark constant functions after dead code is
     eliminated by life_analysis, but we need to do it early, as -fprofile-arcs
     may insert code making function non-constant, but we still must consider
     it as constant, otherwise -fbranch-probabilities will not read data back.

     life_analysis rarely eliminates modification of external memory.
   */
  if (optimize)
    {
      /* Alias analysis depends on this information and mark_constant_function
	 depends on alias analysis.  */
      reg_scan (insns, max_reg_num (), 1);
      mark_constant_function ();
    }

  close_dump_file (DFI_cfg, print_rtl_with_bb, insns);
}
964
/* Purge addressofs: replace (address-of pseudo) constructs left over
   from expansion, then delete blocks made unreachable and rescan
   registers.  Dumps to "addressof".  */
static void
rest_of_handle_addressof (tree decl, rtx insns)
{
  open_dump_file (DFI_addressof, decl);

  purge_addressof (insns);
  if (optimize && purge_all_dead_edges (0))
    delete_unreachable_blocks ();
  reg_scan (insns, max_reg_num (), 1);

  close_dump_file (DFI_addressof, print_rtl, insns);
}
978
/* We may have potential sibling or tail recursion sites.  Select one
   (of possibly multiple) methods of performing the call.

   NOTE(review): this closes the DFI_sibling dump but does not open
   it — the caller is presumably responsible; verify in
   rest_of_compilation.  */
static void
rest_of_handle_sibling_calls (rtx insns)
{
  rtx insn;
  optimize_sibling_and_tail_recursive_calls ();

  /* Recompute the CFG as sibling optimization clobbers it randomly.  */
  free_bb_for_insn ();
  find_exception_handler_labels ();
  rebuild_jump_labels (insns);
  find_basic_blocks (insns, max_reg_num (), dump_file);

  /* There is pass ordering problem - we must lower NOTE_INSN_PREDICTION
     notes before simplifying cfg and we must do lowering after sibcall
     that unhides parts of RTL chain and cleans up the CFG.

     Until sibcall is replaced by tree-level optimizer, lets just
     sweep away the NOTE_INSN_PREDICTION notes that leaked out.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE
	&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION)
      delete_insn (insn);

  close_dump_file (DFI_sibling, print_rtl, get_insns ());
}
1006
/* Perform jump bypassing and control flow optimizations.  When
   bypass_jumps changed anything, rebuild jump labels and remove
   newly dead insns.  Charged to TV_BYPASS; dumps to "bypass".  */
static void
rest_of_handle_jump_bypass (tree decl, rtx insns)
{
  timevar_push (TV_BYPASS);
  open_dump_file (DFI_bypass, decl);

  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (insns, max_reg_num (), 1);

  if (bypass_jumps (dump_file))
    {
      rebuild_jump_labels (insns);
      cleanup_cfg (CLEANUP_EXPENSIVE);
      delete_trivially_dead_insns (insns, max_reg_num ());
    }

  close_dump_file (DFI_bypass, print_rtl_with_bb, insns);
  timevar_pop (TV_BYPASS);

  ggc_collect ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
1033
/* Handle inlining of functions in rest_of_compilation.  Return TRUE
   if we must exit rest_of_compilation upon return (the function was
   deferred, is a never-compiled extern inline, or is nested in an
   inlined external function).  Side effects: may set
   DECL_DEFER_OUTPUT, DECL_INLINE and DECL_INITIAL on DECL, and emits
   the initial "rtl" dump.  */
static bool
rest_of_handle_inlining (tree decl)
{
  rtx insns;
  int inlinable = 0;
  tree parent;
  const char *lose;

  /* If we are reconsidering an inline function at the end of
     compilation, skip the stuff for making it inline.  */
  if (cfun->rtl_inline_init)
    return 0;
  cfun->rtl_inline_init = 1;

  /* If this is nested inside an inlined external function, pretend
     it was only declared.  Since we cannot inline such functions,
     generating code for this one is not only not necessary but will
     confuse some debugging output writers.  */
  for (parent = DECL_CONTEXT (current_function_decl);
       parent != NULL_TREE;
       parent = get_containing_scope (parent))
    if (TREE_CODE (parent) == FUNCTION_DECL
	&& DECL_INLINE (parent) && DECL_EXTERNAL (parent))
      {
	DECL_INITIAL (decl) = 0;
	return true;
      }
    else if (TYPE_P (parent))
      /* A function in a local class should be treated normally.  */
      break;

  /* If requested, consider whether to make this function inline.  */
  if ((DECL_INLINE (decl) && !flag_no_inline)
      || flag_inline_functions)
    {
      timevar_push (TV_INTEGRATION);
      lose = function_cannot_inline_p (decl);
      timevar_pop (TV_INTEGRATION);
      if (lose || ! optimize)
	{
	  if (warn_inline && lose && DECL_INLINE (decl))
	    {
	      /* NOTE(review): LOSE is passed as part of a non-literal
		 format string; safe only because LOSE comes from
		 function_cannot_inline_p, not user input.  */
	      char *msg = concat ("%J", lose, NULL);
	      warning (msg, decl);
	      free (msg);
	    }
	  DECL_ABSTRACT_ORIGIN (decl) = 0;
	  /* Don't really compile an extern inline function.
	     If we can't make it inline, pretend
	     it was only declared.  */
	  if (DECL_EXTERNAL (decl))
	    {
	      DECL_INITIAL (decl) = 0;
	      return true;
	    }
	}
      else
	inlinable = DECL_INLINE (decl) = 1;
    }

  insns = get_insns ();

  /* Dump the rtl code if we are dumping rtl.  */

  if (open_dump_file (DFI_rtl, decl))
    {
      if (DECL_STRUCT_FUNCTION (decl)
	  && DECL_STRUCT_FUNCTION (decl)->saved_for_inline)
	fprintf (dump_file, ";; (integrable)\n\n");
      close_dump_file (DFI_rtl, print_rtl, insns);
    }

  /* Convert from NOTE_INSN_EH_REGION style notes, and do other
     sorts of eh initialization.  Delay this until after the
     initial rtl dump so that we can see the original nesting.  */
  convert_from_eh_region_ranges ();

  /* If function is inline, and we don't yet know whether to
     compile it by itself, defer decision till end of compilation.
     wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those functions that need to
     be output.  Also defer those functions that we are supposed
     to defer.  */

  if (inlinable
      || (DECL_INLINE (decl)
	  /* Egad.  This RTL deferral test conflicts with Fortran assumptions
	     for unreferenced symbols.  See g77.f-torture/execute/980520-1.f.
	     But removing this line from the check breaks all languages that
	     use the call graph to output symbols.  This hard-coded check is
	     the least invasive work-around.  */
	  && (flag_inline_functions
	      || strcmp (lang_hooks.name, "GNU F77") == 0)
	  && ((! TREE_PUBLIC (decl) && ! TREE_ADDRESSABLE (decl)
	       && ! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
	       && ! flag_keep_inline_functions)
	      || DECL_EXTERNAL (decl))))
    DECL_DEFER_OUTPUT (decl) = 1;

  if (DECL_INLINE (decl))
    /* DWARF wants separate debugging info for abstract and
       concrete instances of all inline functions, including those
       declared inline but not inlined, and those inlined even
       though they weren't declared inline.  Conveniently, that's
       what DECL_INLINE means at this point.  */
    (*debug_hooks->deferred_inline_function) (decl);

  if (DECL_DEFER_OUTPUT (decl))
    {
      /* If -Wreturn-type, we have to do a bit of compilation.  We just
	 want to call cleanup the cfg to figure out whether or not we can
	 fall off the end of the function; we do the minimum amount of
	 work necessary to make that safe.  */
      if (warn_return_type)
	{
	  int saved_optimize = optimize;

	  optimize = 0;
	  rebuild_jump_labels (insns);
	  find_exception_handler_labels ();
	  find_basic_blocks (insns, max_reg_num (), dump_file);
	  cleanup_cfg (CLEANUP_PRE_SIBCALL | CLEANUP_PRE_LOOP);
	  optimize = saved_optimize;

	  /* CFG is no longer maintained up-to-date.  */
	  free_bb_for_insn ();
	}

      set_nothrow_function_flags ();
      if (current_function_nothrow)
	/* Now we know that this can't throw; set the flag for the benefit
	   of other functions later in this translation unit.  */
	TREE_NOTHROW (current_function_decl) = 1;

      timevar_push (TV_INTEGRATION);
      save_for_inline (decl);
      timevar_pop (TV_INTEGRATION);
      DECL_STRUCT_FUNCTION (decl)->inlinable = inlinable;
      return true;
    }

  /* If specified extern inline but we aren't inlining it, we are
     done.  This goes for anything that gets here with DECL_EXTERNAL
     set, not just things with DECL_INLINE.  */
  return (bool) DECL_EXTERNAL (decl);
}
1182
1183/* Try to identify useless null pointer tests and delete them. */
1184static void
1185rest_of_handle_null_pointer (tree decl, rtx insns)
1186{
1187 open_dump_file (DFI_null, decl);
1188 if (dump_file)
1189 dump_flow_info (dump_file);
1190
1191 if (delete_null_pointer_checks (insns))
1192 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1193
1194 close_dump_file (DFI_null, print_rtl_with_bb, insns);
1195}
1196
1197/* Try combining insns through substitution. */
1198static void
1199rest_of_handle_combine (tree decl, rtx insns)
1200{
1201 int rebuild_jump_labels_after_combine = 0;
1202
1203 timevar_push (TV_COMBINE);
1204 open_dump_file (DFI_combine, decl);
1205
1206 rebuild_jump_labels_after_combine
1207 = combine_instructions (insns, max_reg_num ());
1208
1209 /* Combining insns may have turned an indirect jump into a
1210 direct jump. Rebuild the JUMP_LABEL fields of jumping
1211 instructions. */
1212 if (rebuild_jump_labels_after_combine)
1213 {
1214 timevar_push (TV_JUMP);
1215 rebuild_jump_labels (insns);
1216 timevar_pop (TV_JUMP);
1217
1218 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
1219 }
1220
1221 close_dump_file (DFI_combine, print_rtl_with_bb, insns);
1222 timevar_pop (TV_COMBINE);
1223
1224 ggc_collect ();
1225}
1226
/* Perform life analysis on INSNS for function DECL: compute register
   lifetime information and use it to clean up the CFG and emit
   uninitialized-variable warnings.  NOTE(review): this pops the
   TV_FLOW timer that was pushed by the caller (rest_of_compilation)
   before the CFG passes — the push/pop pair spans several functions.  */
static void
rest_of_handle_life (tree decl, rtx insns)
{
  open_dump_file (DFI_life, decl);
  regclass_init ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  life_analysis (insns, dump_file, PROP_FINAL);
  if (optimize)
    cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_UPDATE_LIFE
		 | CLEANUP_LOG_LINKS
		 | (flag_thread_jumps ? CLEANUP_THREADING : 0));
  timevar_pop (TV_FLOW);

  /* Life information is required for these warnings, which is why they
     are issued here rather than earlier.  */
  if (warn_uninitialized)
    {
      uninitialized_vars_warning (DECL_INITIAL (decl));
      if (extra_warnings)
	setjmp_args_warning ();
    }

  if (optimize)
    {
      /* Only the old register allocator wants uninitialized subregs
	 initialized here; the new one handles them itself.  */
      if (!flag_new_regalloc && initialize_uninitialized_subregs ())
	{
	  /* Insns were inserted, and possibly pseudos created, so
	     things might look a bit different.  */
	  insns = get_insns ();
	  allocate_reg_life_data ();
	  update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
			    PROP_LOG_LINKS | PROP_REG_INFO | PROP_DEATH_NOTES);
	}
    }

  /* From here on, passes must not create new pseudo registers.  */
  no_new_pseudos = 1;

  close_dump_file (DFI_life, print_rtl_with_bb, insns);

  ggc_collect ();
}
1270
1271/* Perform common subexpression elimination. Nonzero value from
1272 `cse_main' means that jumps were simplified and some code may now
1273 be unreachable, so do jump optimization again. */
1274static void
1275rest_of_handle_cse (tree decl, rtx insns)
1276{
1277 int tem;
1278
1279 open_dump_file (DFI_cse, decl);
1280 if (dump_file)
1281 dump_flow_info (dump_file);
1282 timevar_push (TV_CSE);
1283
1284 reg_scan (insns, max_reg_num (), 1);
1285
1286 tem = cse_main (insns, max_reg_num (), 0, dump_file);
1287 if (tem)
1288 rebuild_jump_labels (insns);
1289 if (purge_all_dead_edges (0))
1290 delete_unreachable_blocks ();
1291
1292 delete_trivially_dead_insns (insns, max_reg_num ());
1293
1294 /* If we are not running more CSE passes, then we are no longer
1295 expecting CSE to be run. But always rerun it in a cheap mode. */
1296 cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
1297
1298 if (tem || optimize > 1)
1299 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1300 /* Try to identify useless null pointer tests and delete them. */
1301 if (flag_delete_null_pointer_checks)
1302 {
1303 timevar_push (TV_JUMP);
1304
1305 if (delete_null_pointer_checks (insns))
1306 cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
1307 timevar_pop (TV_JUMP);
1308 }
1309
1310 /* The second pass of jump optimization is likely to have
1311 removed a bunch more instructions. */
1312 renumber_insns (dump_file);
1313
1314 timevar_pop (TV_CSE);
1315 close_dump_file (DFI_cse, print_rtl_with_bb, insns);
1316}
1317
1318/* Run second CSE pass after loop optimizations. */
1319static void
1320rest_of_handle_cse2 (tree decl, rtx insns)
1321{
1322 int tem;
1323
1324 timevar_push (TV_CSE2);
1325 open_dump_file (DFI_cse2, decl);
1326 if (dump_file)
1327 dump_flow_info (dump_file);
1328 /* CFG is no longer maintained up-to-date. */
1329 tem = cse_main (insns, max_reg_num (), 1, dump_file);
1330
1331 /* Run a pass to eliminate duplicated assignments to condition code
1332 registers. We have to run this after bypass_jumps, because it
1333 makes it harder for that pass to determine whether a jump can be
1334 bypassed safely. */
1335 cse_condition_code_reg ();
1336
1337 purge_all_dead_edges (0);
1338 delete_trivially_dead_insns (insns, max_reg_num ());
1339
1340 if (tem)
1341 {
1342 timevar_push (TV_JUMP);
1343 rebuild_jump_labels (insns);
1344 cleanup_cfg (CLEANUP_EXPENSIVE);
1345 timevar_pop (TV_JUMP);
1346 }
1347 reg_scan (insns, max_reg_num (), 0);
1348 close_dump_file (DFI_cse2, print_rtl_with_bb, insns);
1349 ggc_collect ();
1350 timevar_pop (TV_CSE2);
1351}
1352
/* Perform global cse on INSNS for function DECL, followed by optional
   cleanup CSE passes.  Iterates jump-label rebuilding and CSE until
   neither GCSE nor CSE reports further jump simplifications.  */
static void
rest_of_handle_gcse (tree decl, rtx insns)
{
  /* Saved copies of the CSE flags we temporarily clear below.  */
  int save_csb, save_cfj;
  /* TEM: gcse_main simplified jumps; TEM2: cse_main simplified jumps.  */
  int tem2 = 0, tem;

  timevar_push (TV_GCSE);
  open_dump_file (DFI_gcse, decl);

  tem = gcse_main (insns, dump_file);
  rebuild_jump_labels (insns);
  delete_trivially_dead_insns (insns, max_reg_num ());

  /* Disable block-skipping and jump-following for the cleanup CSE
     runs below; restored before returning.  */
  save_csb = flag_cse_skip_blocks;
  save_cfj = flag_cse_follow_jumps;
  flag_cse_skip_blocks = flag_cse_follow_jumps = 0;

  /* Instantiate any remaining CONSTANT_P_RTX nodes.  */
  if (current_function_calls_constant_p)
    purge_builtin_constant_p ();

  /* If -fexpensive-optimizations, re-run CSE to clean up things done
     by gcse.  */
  if (flag_expensive_optimizations)
    {
      timevar_push (TV_CSE);
      reg_scan (insns, max_reg_num (), 1);
      tem2 = cse_main (insns, max_reg_num (), 0, dump_file);
      purge_all_dead_edges (0);
      delete_trivially_dead_insns (insns, max_reg_num ());
      timevar_pop (TV_CSE);
      cse_not_expected = !flag_rerun_cse_after_loop;
    }

  /* If gcse or cse altered any jumps, rerun jump optimizations to clean
     things up.  Then possibly re-run CSE again.  Terminates because each
     iteration resets TEM/TEM2 and only the optional CSE re-run can set
     TEM2 again.  */
  while (tem || tem2)
    {
      tem = tem2 = 0;
      timevar_push (TV_JUMP);
      rebuild_jump_labels (insns);
      cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
      timevar_pop (TV_JUMP);

      if (flag_expensive_optimizations)
	{
	  timevar_push (TV_CSE);
	  reg_scan (insns, max_reg_num (), 1);
	  tem2 = cse_main (insns, max_reg_num (), 0, dump_file);
	  purge_all_dead_edges (0);
	  delete_trivially_dead_insns (insns, max_reg_num ());
	  timevar_pop (TV_CSE);
	}
    }

  close_dump_file (DFI_gcse, print_rtl_with_bb, insns);
  timevar_pop (TV_GCSE);

  ggc_collect ();
  /* Restore the CSE flags cleared above.  */
  flag_cse_skip_blocks = save_csb;
  flag_cse_follow_jumps = save_cfj;
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
1419
/* Move constant computations out of loops (the classic RTL loop
   optimizer, loop_optimize), optionally running it twice when
   -frerun-loop-opt is given.  The CFG is torn down for the duration
   of the pass and rebuilt at the end.  */
static void
rest_of_handle_loop_optimize (tree decl, rtx insns)
{
  /* Bit flags passed to loop_optimize selecting unrolling/prefetch.  */
  int do_unroll, do_prefetch;

  timevar_push (TV_LOOP);
  delete_dead_jumptables ();
  cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
  open_dump_file (DFI_loop, decl);

  /* CFG is no longer maintained up-to-date.  */
  free_bb_for_insn ();

  if (flag_unroll_loops)
    do_unroll = LOOP_AUTO_UNROLL;	/* Having two unrollers is useless.  */
  else
    do_unroll = flag_old_unroll_loops ? LOOP_UNROLL : LOOP_AUTO_UNROLL;
  do_prefetch = flag_prefetch_loop_arrays ? LOOP_PREFETCH : 0;

  if (flag_rerun_loop_opt)
    {
      cleanup_barriers ();

      /* We only want to perform unrolling once.  */
      loop_optimize (insns, dump_file, do_unroll);
      do_unroll = 0;

      /* The first call to loop_optimize makes some instructions
	 trivially dead.  We delete those instructions now in the
	 hope that doing so will make the heuristics in loop work
	 better and possibly speed up compilation.  */
      delete_trivially_dead_insns (insns, max_reg_num ());

      /* The regscan pass is currently necessary as the alias
	 analysis code depends on this information.  */
      reg_scan (insns, max_reg_num (), 1);
    }
  cleanup_barriers ();
  loop_optimize (insns, dump_file, do_unroll | LOOP_BCT | do_prefetch);

  /* Loop can create trivially dead instructions.  */
  delete_trivially_dead_insns (insns, max_reg_num ());
  close_dump_file (DFI_loop, print_rtl, insns);
  timevar_pop (TV_LOOP);

  /* Rebuild the CFG that was freed above.  */
  find_basic_blocks (insns, max_reg_num (), dump_file);

  ggc_collect ();
}
1469
/* Perform loop optimizations.  It might be better to do them a bit
   sooner, but we want the profile feedback to work more
   efficiently.  This is the new CFG-based loop optimizer: unswitching
   and unrolling/peeling, run inside cfg_layout mode.  */
static void
rest_of_handle_loop2 (tree decl, rtx insns)
{
  struct loops *loops;
  basic_block bb;

  timevar_push (TV_LOOP);
  open_dump_file (DFI_loop2, decl);
  if (dump_file)
    dump_flow_info (dump_file);

  /* Initialize structures for layout changes.  */
  cfg_layout_initialize ();

  /* May return NULL, in which case no loop transformations run.  */
  loops = loop_optimizer_init (dump_file);

  if (loops)
    {
      /* The optimizations: */
      if (flag_unswitch_loops)
	unswitch_loops (loops);

      if (flag_peel_loops || flag_unroll_loops)
	unroll_and_peel_loops (loops,
			       (flag_peel_loops ? UAP_PEEL : 0) |
			       (flag_unroll_loops ? UAP_UNROLL : 0) |
			       (flag_unroll_all_loops ? UAP_UNROLL_ALL : 0));

      loop_optimizer_finalize (loops, dump_file);
    }

  /* Finalize layout changes: restore the linear chain of blocks
     before leaving cfg_layout mode.  */
  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->rbi->next = bb->next_bb;
  cfg_layout_finalize ();

  cleanup_cfg (CLEANUP_EXPENSIVE);
  delete_trivially_dead_insns (insns, max_reg_num ());
  reg_scan (insns, max_reg_num (), 0);
  if (dump_file)
    dump_flow_info (dump_file);
  close_dump_file (DFI_loop2, print_rtl_with_bb, get_insns ());
  timevar_pop (TV_LOOP);
  ggc_collect ();
}
1519
/* This is called from finish_function (within langhooks.parse_file)
   after each top-level definition is parsed.
   It is supposed to compile that function or variable
   and output the assembler code for it.
   After we return, the tree storage is freed.

   This is the master driver for the RTL pass pipeline: the pass
   ordering below is significant and many passes communicate through
   globals (cse_not_expected, no_new_pseudos, flow2_completed, ...).  */

void
rest_of_compilation (tree decl)
{
  rtx insns;

  timevar_push (TV_REST_OF_COMPILATION);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  /* Now that we're out of the frontend, we shouldn't have any more
     CONCATs anywhere.  */
  generating_concat_p = 0;

  /* When processing delayed functions, prepare_function_start() won't
     have been run to re-initialize it.  */
  cse_not_expected = ! optimize;

  /* First, make sure that NOTE_BLOCK is set correctly for each
     NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END note.  */
  if (!cfun->x_whole_function_mode_p)
    identify_blocks ();

  /* In function-at-a-time mode, we do not attempt to keep the BLOCK
     tree in sensible shape.  So, we just recalculate it here.  */
  if (cfun->x_whole_function_mode_p)
    reorder_blocks ();

  init_flow ();

  /* A true return means no code needs to be emitted now (e.g. the
     function was deferred or is extern inline); bail out.  */
  if (rest_of_handle_inlining (decl))
    goto exit_rest_of_compilation;

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (decl))
    (*debug_hooks->outlining_inline_function) (decl);

  /* Remove any notes we don't need.  That will make iterating
     over the instruction sequence faster, and allow the garbage
     collector to reclaim the memory used by the notes.  */
  remove_unnecessary_notes ();
  reorder_blocks ();

  ggc_collect ();

  /* Initialize some variables used by the optimizers.  */
  init_function_for_compilation ();

  if (! DECL_DEFER_OUTPUT (decl))
    TREE_ASM_WRITTEN (decl) = 1;

  /* Now that integrate will no longer see our rtl, we need not
     distinguish between the return value of this function and the
     return value of called functions.  Also, we can remove all SETs
     of subregs of hard registers; they are only here because of
     integrate.  Also, we can now initialize pseudos intended to
     carry magic hard reg data throughout the function.  */
  rtx_equal_function_value_matters = 0;
  purge_hard_subreg_sets (get_insns ());

  /* Early return if there were errors.  We can run afoul of our
     consistency checks, and there's not really much point in fixing them.
     Don't return yet if -Wreturn-type; we need to do cleanup_cfg.  */
  if (((rtl_dump_and_exit || flag_syntax_only) && !warn_return_type)
      || errorcount || sorrycount)
    goto exit_rest_of_compilation;

  /* Build the initial CFG so the early jump/sibcall passes can run.  */
  timevar_push (TV_JUMP);
  open_dump_file (DFI_sibling, decl);
  insns = get_insns ();
  rebuild_jump_labels (insns);
  find_exception_handler_labels ();
  find_basic_blocks (insns, max_reg_num (), dump_file);

  delete_unreachable_blocks ();

  /* Turn NOTE_INSN_PREDICTIONs into branch predictions.  */
  if (flag_guess_branch_prob)
    {
      timevar_push (TV_BRANCH_PROB);
      note_prediction_to_br_prob ();
      timevar_pop (TV_BRANCH_PROB);
    }

  if (flag_optimize_sibling_calls)
    rest_of_handle_sibling_calls (insns);

  /* We have to issue these warnings now already, because CFG cleanups
     further down may destroy the required information.  However, this
     must be done after the sibcall optimization pass because the barrier
     emitted for noreturn calls that are candidate for the optimization
     is folded into the CALL_PLACEHOLDER until after this pass, so the
     CFG is inaccurate.  */
  check_function_return_warnings ();

  timevar_pop (TV_JUMP);

  insn_locators_initialize ();
  /* Complete generation of exception handling code.  */
  if (doing_eh (0))
    {
      timevar_push (TV_JUMP);
      open_dump_file (DFI_eh, decl);

      finish_eh_generation ();

      close_dump_file (DFI_eh, print_rtl, get_insns ());
      timevar_pop (TV_JUMP);
    }

  /* Delay emitting hard_reg_initial_value sets until after EH landing pad
     generation, which might create new sets.  */
  emit_initial_value_sets ();

#ifdef FINALIZE_PIC
  /* If we are doing position-independent code generation, now
     is the time to output special prologues and epilogues.
     We do not want to do this earlier, because it just clutters
     up inline functions with meaningless insns.  */
  if (flag_pic)
    FINALIZE_PIC;
#endif

  /* Re-fetch the insn chain; EH generation above may have added insns.  */
  insns = get_insns ();

  /* Copy any shared structure that should not be shared.  */
  unshare_all_rtl (current_function_decl, insns);

#ifdef SETJMP_VIA_SAVE_AREA
  /* This must be performed before virtual register instantiation.
     Please be aware the everything in the compiler that can look
     at the RTL up to this point must understand that REG_SAVE_AREA
     is just like a use of the REG contained inside.  */
  if (current_function_calls_alloca)
    optimize_save_area_alloca (insns);
#endif

  /* Instantiate all virtual registers.  */
  instantiate_virtual_regs (current_function_decl, insns);

  open_dump_file (DFI_jump, decl);

  /* Always do one jump optimization pass to ensure that JUMP_LABEL fields
     are initialized and to compute whether control can drop off the end
     of the function.  */

  timevar_push (TV_JUMP);
  /* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB.  Do this
     before jump optimization switches branch directions.  */
  if (flag_guess_branch_prob)
    expected_value_to_br_prob ();

  reg_scan (insns, max_reg_num (), 0);
  rebuild_jump_labels (insns);
  find_basic_blocks (insns, max_reg_num (), dump_file);
  delete_trivially_dead_insns (insns, max_reg_num ());
  if (dump_file)
    dump_flow_info (dump_file);
  cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0) | CLEANUP_PRE_LOOP
	       | (flag_thread_jumps ? CLEANUP_THREADING : 0));

  if (optimize)
    {
      /* copy_loop_headers works on the bare insn chain, so the CFG is
	 freed around it and rebuilt afterwards.  */
      free_bb_for_insn ();
      copy_loop_headers (insns);
      find_basic_blocks (insns, max_reg_num (), dump_file);
    }
  purge_line_number_notes (insns);

  timevar_pop (TV_JUMP);
  close_dump_file (DFI_jump, print_rtl, insns);

  /* Now is when we stop if -fsyntax-only and -Wreturn-type.  */
  if (rtl_dump_and_exit || flag_syntax_only || DECL_DEFER_OUTPUT (decl))
    goto exit_rest_of_compilation;

  timevar_push (TV_JUMP);

  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);

  if (flag_delete_null_pointer_checks)
    rest_of_handle_null_pointer (decl, insns);

  /* Jump optimization, and the removal of NULL pointer checks, may
     have reduced the number of instructions substantially.  CSE, and
     future passes, allocate arrays whose dimensions involve the
     maximum instruction UID, so if we can reduce the maximum UID
     we'll save big on memory.  */
  renumber_insns (dump_file);
  timevar_pop (TV_JUMP);

  close_dump_file (DFI_jump, print_rtl_with_bb, insns);

  ggc_collect ();

  if (optimize > 0)
    rest_of_handle_cse (decl, insns);

  rest_of_handle_addressof (decl, insns);

  ggc_collect ();

  if (optimize > 0)
    {
      if (flag_gcse)
	rest_of_handle_gcse (decl, insns);

      if (flag_loop_optimize)
	rest_of_handle_loop_optimize (decl, insns);

      if (flag_gcse)
	rest_of_handle_jump_bypass (decl, insns);
    }

  /* NOTE(review): this TV_FLOW push is popped inside
     rest_of_handle_life, further down the pipeline.  */
  timevar_push (TV_FLOW);

  rest_of_handle_cfg (decl, insns);

  if (optimize > 0
      || profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
    {
      rest_of_handle_branch_prob (decl, insns);

      if (flag_branch_probabilities
	  && flag_profile_values
	  && flag_value_profile_transformations)
	rest_of_handle_value_profile_transformations (decl, insns);

      /* Remove the death notes created for vpt.  */
      if (flag_profile_values)
	count_or_remove_death_notes (NULL, 1);
    }

  if (optimize > 0)
    rest_of_handle_if_conversion (decl, insns);

  if (flag_tracer)
    rest_of_handle_tracer (decl, insns);

  if (optimize > 0
      && (flag_unswitch_loops
	  || flag_peel_loops
	  || flag_unroll_loops))
    rest_of_handle_loop2 (decl, insns);

  if (flag_web)
    rest_of_handle_web (decl, insns);

  if (flag_rerun_cse_after_loop)
    rest_of_handle_cse2 (decl, insns);

  cse_not_expected = 1;

  rest_of_handle_life (decl, insns);

  if (optimize > 0)
    rest_of_handle_combine (decl, insns);

  if (flag_if_conversion)
    rest_of_handle_if_after_combine (decl, insns);

  if (optimize > 0 && (flag_regmove || flag_expensive_optimizations))
    rest_of_handle_regmove (decl, insns);

  /* Do unconditional splitting before register allocation to allow machine
     description to add extra information not needed previously.  */
  split_all_insns (1);

#ifdef OPTIMIZE_MODE_SWITCHING
  timevar_push (TV_MODE_SWITCH);

  /* Mode switching may create new pseudos; temporarily allow that.  */
  no_new_pseudos = 0;
  optimize_mode_switching (NULL);
  no_new_pseudos = 1;

  timevar_pop (TV_MODE_SWITCH);
#endif

  /* Any of the several passes since flow1 will have munged register
     lifetime data a bit.  We need it to be up to date for scheduling
     (see handling of reg_known_equiv in init_alias_analysis).  */
  recompute_reg_usage (insns, !optimize_size);

#ifdef INSN_SCHEDULING
  rest_of_handle_sched (decl, insns);
#endif

  /* Determine if the current function is a leaf before running reload
     since this can impact optimizations done by the prologue and
     epilogue thus changing register elimination offsets.  */
  current_function_is_leaf = leaf_function_p ();

  /* Register allocation: either the new allocator or the classic
     local/global pair.  Either handler may abort compilation of this
     function, signalled by a true return.  */
  timevar_push (TV_LOCAL_ALLOC);
  open_dump_file (DFI_lreg, decl);

  if (flag_new_regalloc)
    {
      if (rest_of_handle_new_regalloc (decl, insns))
	goto exit_rest_of_compilation;
    }
  else
    {
      if (rest_of_handle_old_regalloc (decl, insns))
	goto exit_rest_of_compilation;
    }

  ggc_collect ();

  open_dump_file (DFI_postreload, decl);

  /* Do a very simple CSE pass over just the hard registers.  */
  if (optimize > 0)
    {
      timevar_push (TV_RELOAD_CSE_REGS);
      reload_cse_regs (insns);
      /* reload_cse_regs can eliminate potentially-trapping MEMs.
	 Remove any EH edges associated with them.  */
      if (flag_non_call_exceptions)
	purge_all_dead_edges (0);
      timevar_pop (TV_RELOAD_CSE_REGS);
    }

  close_dump_file (DFI_postreload, print_rtl_with_bb, insns);

  if (optimize > 0 && flag_gcse_after_reload)
    rest_of_handle_gcse2 (decl, insns);

  /* Re-create the death notes which were deleted during reload.  */
  timevar_push (TV_FLOW2);
  open_dump_file (DFI_flow2, decl);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns (0);

  if (flag_branch_target_load_optimize)
    {
      open_dump_file (DFI_branch_target_load, decl);

      branch_target_load_optimize (insns, false);

      close_dump_file (DFI_branch_target_load, print_rtl_with_bb, insns);

      ggc_collect ();
    }

  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns (insns);
  epilogue_completed = 1;

  if (optimize)
    {
      life_analysis (insns, dump_file, PROP_POSTRELOAD);
      cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE
		   | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));

      /* This is kind of a heuristic.  We need to run combine_stack_adjustments
	 even for machines with possibly nonzero RETURN_POPS_ARGS
	 and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
	 push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
      if (!ACCUMULATE_OUTGOING_ARGS)
#endif
	combine_stack_adjustments ();

      ggc_collect ();
    }

  flow2_completed = 1;

  close_dump_file (DFI_flow2, print_rtl_with_bb, insns);
  timevar_pop (TV_FLOW2);

#ifdef HAVE_peephole2
  if (optimize > 0 && flag_peephole2)
    {
      timevar_push (TV_PEEPHOLE2);
      open_dump_file (DFI_peephole2, decl);

      peephole2_optimize (dump_file);

      close_dump_file (DFI_peephole2, print_rtl_with_bb, insns);
      timevar_pop (TV_PEEPHOLE2);
    }
#endif

  open_dump_file (DFI_ce3, decl);
  if (optimize)
    /* Last attempt to optimize CFG, as scheduling, peepholing and insn
       splitting possibly introduced more crossjumping opportunities.  */
    cleanup_cfg (CLEANUP_EXPENSIVE
		 | CLEANUP_UPDATE_LIFE
		 | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
  if (flag_if_conversion2)
    {
      timevar_push (TV_IFCVT2);

      if_convert (1);

      timevar_pop (TV_IFCVT2);
    }
  close_dump_file (DFI_ce3, print_rtl_with_bb, insns);

  if (optimize > 0)
    {
      if (flag_rename_registers || flag_cprop_registers)
	rest_of_handle_regrename (decl, insns);

      rest_of_handle_reorder_blocks (decl, insns);
    }

  if (flag_branch_target_load_optimize2)
    {
      /* Leave this a warning for now so that it is possible to experiment
	 with running this pass twice.  In 3.6, we should either make this
	 an error, or use separate dump files.  */
      if (flag_branch_target_load_optimize)
	warning ("branch target register load optimization is not intended "
		 "to be run twice");

      open_dump_file (DFI_branch_target_load, decl);

      branch_target_load_optimize (insns, true);

      close_dump_file (DFI_branch_target_load, print_rtl_with_bb, insns);

      ggc_collect ();
    }

#ifdef INSN_SCHEDULING
  if (optimize > 0 && flag_schedule_insns_after_reload)
    rest_of_handle_sched2 (decl, insns);
#endif

#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif

#ifdef STACK_REGS
  rest_of_handle_stack_regs (decl, insns);
#endif

  compute_alignments ();

  if (flag_var_tracking)
    rest_of_handle_variable_tracking (decl, insns);

  /* CFG is no longer maintained up-to-date.  */
  free_bb_for_insn ();

  if (targetm.machine_dependent_reorg != 0)
    rest_of_handle_machine_reorg (decl, insns);

  purge_line_number_notes (insns);
  cleanup_barriers ();

#ifdef DELAY_SLOTS
  if (optimize > 0 && flag_delayed_branch)
    rest_of_handle_delay_slots (decl, insns);
#endif

#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  timevar_push (TV_SHORTEN_BRANCH);
  split_all_insns_noflow ();
  timevar_pop (TV_SHORTEN_BRANCH);
#endif

  convert_to_eh_region_ranges ();

  /* Shorten branches.  */
  timevar_push (TV_SHORTEN_BRANCH);
  shorten_branches (get_insns ());
  timevar_pop (TV_SHORTEN_BRANCH);

  set_nothrow_function_flags ();
  if (current_function_nothrow)
    /* Now we know that this can't throw; set the flag for the benefit
       of other functions later in this translation unit.  */
    TREE_NOTHROW (current_function_decl) = 1;

  rest_of_handle_final (decl, insns);

  /* Write DBX symbols if requested.  */

  /* Note that for those inline functions where we don't initially
     know for certain that we will be generating an out-of-line copy,
     the first invocation of this routine (rest_of_compilation) will
     skip over this code by doing a `goto exit_rest_of_compilation;'.
     Later on, wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those inline functions that need
     to have out-of-line copies generated.  During that call, we
     *will* be routed past here.  */

  timevar_push (TV_SYMOUT);
  (*debug_hooks->function_decl) (decl);
  timevar_pop (TV_SYMOUT);

 exit_rest_of_compilation:

  coverage_end_function ();

  /* In case the function was not output,
     don't leave any temporary anonymous types
     queued up for sdb output.  */
#ifdef SDB_DEBUGGING_INFO
  if (write_symbols == SDB_DEBUG)
    sdbout_types (NULL_TREE);
#endif

  /* Reset the pipeline-phase globals for the next function.  */
  reload_completed = 0;
  epilogue_completed = 0;
  flow2_completed = 0;
  no_new_pseudos = 0;

  timevar_push (TV_FINAL);

  /* Clear out the insn_length contents now that they are no
     longer valid.  */
  init_insn_lengths ();

  /* Show no temporary slots allocated.  */
  init_temp_slots ();

  free_basic_block_vars (0);
  free_bb_for_insn ();

  timevar_pop (TV_FINAL);

  /* Record the preferred incoming stack boundary in the cgraph RTL
     info, bumping it to the needed alignment for self-recursive
     functions emitted with a recursive call.  */
  if ((*targetm.binds_local_p) (current_function_decl))
    {
      int pref = cfun->preferred_stack_boundary;
      if (cfun->recursive_call_emit
	  && cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
	pref = cfun->stack_alignment_needed;
      cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
	= pref;
    }

  /* Make sure volatile mem refs aren't considered valid operands for
     arithmetic insns.  We must call this here if this is a nested inline
     function, since the above code leaves us in the init_recog state
     (from final.c), and the function context push/pop code does not
     save/restore volatile_ok.

     ??? Maybe it isn't necessary for expand_start_function to call this
     anymore if we do it here?  */

  init_recog_no_volatile ();

  /* We're done with this function.  Free up memory if we can.  */
  free_after_parsing (cfun);
  if (! DECL_DEFER_OUTPUT (decl))
    {
      free_after_compilation (cfun);
      DECL_STRUCT_FUNCTION (decl) = 0;
    }
  cfun = 0;

  ggc_collect ();

  timevar_pop (TV_REST_OF_COMPILATION);
}
2115
2116void
2117init_optimization_passes (void)
2118{
2119 if (flag_unit_at_a_time)
2120 {
2121 open_dump_file (DFI_cgraph, NULL);
2122 cgraph_dump_file = dump_file;
2123 dump_file = NULL;
2124 }
2125}
2126
2127void
2128finish_optimization_passes (void)
2129{
2130 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
2131 {
2132 timevar_push (TV_DUMP);
2133 open_dump_file (DFI_bp, NULL);
2134
2135 end_branch_prob ();
2136
2137 close_dump_file (DFI_bp, NULL, NULL_RTX);
2138 timevar_pop (TV_DUMP);
2139 }
2140
2141 if (optimize > 0 && open_dump_file (DFI_combine, NULL))
2142 {
2143 timevar_push (TV_DUMP);
2144 dump_combine_total_stats (dump_file);
2145 close_dump_file (DFI_combine, NULL, NULL_RTX);
2146 timevar_pop (TV_DUMP);
2147 }
2148
2149 if (flag_unit_at_a_time)
2150 {
2151 dump_file = cgraph_dump_file;
2152 cgraph_dump_file = NULL;
2153 close_dump_file (DFI_cgraph, NULL, NULL_RTX);
2154 }
2155
2156 /* Do whatever is necessary to finish printing the graphs. */
2157 if (graph_dump_format != no_graph)
2158 {
2159 int i;
2160
2161 for (i = 0; i < (int) DFI_MAX; ++i)
2162 if (dump_file_tbl[i].initialized && dump_file_tbl[i].graph_dump_p)
2163 {
2164 char seq[16];
2165 char *suffix;
2166
2167 sprintf (seq, DUMPFILE_FORMAT, i);
2168 suffix = concat (seq, dump_file_tbl[i].extension, NULL);
2169 finish_graph_dump_file (dump_base_name, suffix);
2170 free (suffix);
2171 }
2172 }
2173
2174}
2175
2176bool
2177enable_rtl_dump_file (int letter)
2178{
2179 bool matched = false;
2180 int i;
2181
2182 if (letter == 'a')
2183 {
2184 for (i = 0; i < (int) DFI_MAX; ++i)
2185 dump_file_tbl[i].enabled = 1;
2186 matched = true;
2187 }
2188 else
2189 {
2190 for (i = 0; i < (int) DFI_MAX; ++i)
2191 if (letter == dump_file_tbl[i].debug_switch)
2192 {
2193 dump_file_tbl[i].enabled = 1;
2194 matched = true;
2195 }
2196 }
2197
2198 return matched;
2199}
This page took 0.23589 seconds and 5 git commands to generate.