From f133022600b6df6e3ed8eae718b22a8534b60874 Mon Sep 17 00:00:00 2001
From: Jan Hubicka
Date: Sat, 4 Aug 2001 14:08:43 +0200
Subject: [PATCH] * loop.c (try_copy_prop): Kill invalidated REG_EQUAL notes.

* reload1.c (fixup_abnormal_edges): New static function.
(reload): Use it.

* flow.c (need_fake_edge_p): New function.
(flow_call_edges_add): Fix handling of noreturn and sibling calls;
avoid the call insn being the very last insn in the insn stream.

* profile.c (branch_prob): Call flow_call_edges_add instead of
doing that by hand; clean up the CFG to re-merge basic blocks once
we are done.

From-SVN: r44635
---
 gcc/ChangeLog | 15 +++++++++++
 gcc/flow.c    | 73 ++++++++++++++++++++++++++++++++++++++++++++++++---
 gcc/loop.c    | 11 +++++++-
 gcc/profile.c | 30 ++++++---------------
 gcc/reload1.c | 55 ++++++++++++++++++++++++++++++++++++++
 5 files changed, 157 insertions(+), 27 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 70d31306d890..745011526025 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,18 @@
+Sat Aug  4 13:51:36 CEST 2001  Jan Hubicka
+
+	* loop.c (try_copy_prop): Kill invalidated REG_EQUAL notes.
+
+	* reload1.c (fixup_abnormal_edges): New static function.
+	(reload): Use it.
+
+	* flow.c (need_fake_edge_p): New function.
+	(flow_call_edges_add): Fix handling of noreturn and sibling calls;
+	avoid the call insn being the very last insn in the insn stream.
+
+	* profile.c (branch_prob): Call flow_call_edges_add instead of
+	doing that by hand; clean up the CFG to re-merge basic blocks once
+	we are done.
+
 2001-08-04  Neil Booth
 
 	* Makefile.in (CPPLIB_H): New, so that dependencies on cpplib.h
diff --git a/gcc/flow.c b/gcc/flow.c
index ce67be58167e..c602fb179d01 100644
--- a/gcc/flow.c
+++ b/gcc/flow.c
@@ -485,6 +485,7 @@ static int flow_loop_level_compute PARAMS ((struct loop *, int));
 static int flow_loops_level_compute PARAMS ((struct loops *));
 static void delete_dead_jumptables PARAMS ((void));
 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
+static bool need_fake_edge_p PARAMS ((rtx));
 
 /* Find basic blocks of the current function.
    F is the first insn of the function and NREGS the number of register
@@ -2500,9 +2501,35 @@ commit_edge_insertions ()
     }
 }
 
-/* Add fake edges to the function exit for any non constant calls in
-   the bitmap of blocks specified by BLOCKS or to the whole CFG if
-   BLOCKS is zero. Return the nuber of blocks that were split. */
+/* Return true if we need to add a fake edge to the exit block.
+   Helper function for flow_call_edges_add.  */
+static bool
+need_fake_edge_p (insn)
+     rtx insn;
+{
+  if (!INSN_P (insn))
+    return false;
+
+  if ((GET_CODE (insn) == CALL_INSN
+       && !SIBLING_CALL_P (insn)
+       && !find_reg_note (insn, REG_NORETURN, NULL) && !CONST_CALL_P (insn)))
+    return true;
+
+  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
+           && MEM_VOLATILE_P (PATTERN (insn)))
+          || (GET_CODE (PATTERN (insn)) == PARALLEL
+              && asm_noperands (insn) != -1
+              && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
+          || GET_CODE (PATTERN (insn)) == ASM_INPUT);
+}
+
+/* Add fake edges to the function exit for any non constant and non noreturn
+   calls, and for volatile inline assembly, in the bitmap of blocks specified
+   by BLOCKS, or to the whole CFG if BLOCKS is zero.  Return the number of
+   blocks that were split.
+
+   The goal is to expose cases in which entering a basic block does not imply
+   that all subsequent instructions must be executed.  */
 
 int
 flow_call_edges_add (blocks)
@@ -2512,6 +2539,7 @@ flow_call_edges_add (blocks)
   int blocks_split = 0;
   int bb_num = 0;
   basic_block *bbs;
+  bool check_last_block = false;
 
   /* Map bb indicies into basic block pointers since split_block
      will renumber the basic blocks. */
@@ -2522,15 +2550,41 @@ flow_call_edges_add (blocks)
     {
       for (i = 0; i < n_basic_blocks; i++)
 	bbs[bb_num++] = BASIC_BLOCK (i);
+      check_last_block = true;
     }
   else
     {
       EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
 				 {
 				   bbs[bb_num++] = BASIC_BLOCK (i);
+				   if (i == n_basic_blocks - 1)
+				     check_last_block = true;
 				 });
     }
 
+  /* In the last basic block, before epilogue generation, there will be
+     a fallthru edge to EXIT.  Special care is required if the last insn
+     of the last basic block is a call because make_edge folds duplicate
+     edges, which would result in the fallthru edge also being marked
+     fake, which would result in the fallthru edge being removed by
+     remove_fake_edges, which would result in an invalid CFG.
+
+     Moreover, we can't elide the outgoing fake edge, since the block
+     profiler needs to take this into account in order to solve the minimal
+     spanning tree in the case that the call doesn't return.
+
+     Handle this by adding a dummy instruction in a new last basic block.  */
+  if (check_last_block
+      && need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
+    {
+      edge e;
+      for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
+	if (e->dest == EXIT_BLOCK_PTR)
+	  break;
+      insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
+      commit_edge_insertions ();
+    }
+
   /* Now add fake edges to the function exit for any non constant
      calls since there is no way that we can determine if they will
@@ -2545,10 +2599,21 @@
       for (insn = bb->end; ; insn = prev_insn)
 	{
 	  prev_insn = PREV_INSN (insn);
-	  if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
+	  if (need_fake_edge_p (insn))
 	    {
 	      edge e;
 
+	      /* The above condition should be enough to verify that there is
+		 no edge to the exit block in the CFG already.  Calling
+		 make_edge in such a case would cause us to mark that edge as
+		 fake and remove it later.  */
+#ifdef ENABLE_CHECKING
+	      if (insn == bb->end)
+		for (e = bb->succ; e; e = e->succ_next)
+		  if (e->dest == EXIT_BLOCK_PTR)
+		    abort ();
+#endif
+
 	      /* Note that the following may create a new basic block
 		 and renumber the existing basic blocks. */
 	      e = split_block (bb, insn);
diff --git a/gcc/loop.c b/gcc/loop.c
index 998832ca8d2e..fbbef734b7c7 100644
--- a/gcc/loop.c
+++ b/gcc/loop.c
@@ -9289,7 +9289,16 @@ try_copy_prop (loop, replacement, regno)
 	  arg.set_seen = 0;
 	  note_stores (PATTERN (insn), note_reg_stored, &arg);
 	  if (arg.set_seen)
-	    break;
+	    {
+	      rtx note = find_reg_note (insn, REG_EQUAL, NULL);
+
+	      /* It is possible that we've turned a previously valid REG_EQUAL
+		 note into an invalid one: we changed REGNO to REPLACEMENT and,
+		 unlike REGNO, REPLACEMENT is modified, changing the meaning.  */
+	      if (note && reg_mentioned_p (replacement, XEXP (note, 0)))
+		remove_note (insn, note);
+	      break;
+	    }
 	}
     }
   if (! init_insn)
diff --git a/gcc/profile.c b/gcc/profile.c
index ddb621a6d191..1335e9cd5939 100644
--- a/gcc/profile.c
+++ b/gcc/profile.c
@@ -528,6 +528,8 @@ branch_prob ()
 
   total_num_times_called++;
 
+  flow_call_edges_add (NULL);
+
   /* We can't handle cyclic regions constructed using abnormal edges.
      To avoid these we replace every source of abnormal edge by a fake
      edge from entry node and every destination by fake edge to exit.
@@ -562,28 +564,6 @@
 	    have_entry_edge = 1;
 	}
 
-      /* ??? Not strictly needed unless flag_test_coverage, but adding
-	 them anyway keeps the .da file consistent. */
-      /* ??? Currently inexact for basic blocks with multiple calls.
-	 We need to split blocks here. */
-      for (insn = bb->head;
-	   insn != NEXT_INSN (bb->end);
-	   insn = NEXT_INSN (insn))
-	{
-	  rtx set;
-	  if (GET_CODE (insn) == CALL_INSN && !CONST_CALL_P (insn))
-	    need_exit_edge = 1;
-	  else if (GET_CODE (insn) == INSN)
-	    {
-	      set = PATTERN (insn);
-	      if (GET_CODE (set) == PARALLEL)
-		set = XVECEXP (set, 0, 0);
-	      if ((GET_CODE (set) == ASM_OPERANDS && MEM_VOLATILE_P (set))
-		  || GET_CODE (set) == ASM_INPUT)
-		need_exit_edge = 1;
-	    }
-	}
-
       if (need_exit_edge && !have_exit_edge)
 	{
 	  if (rtl_dump_file)
@@ -787,6 +767,12 @@
     }
   remove_fake_edges ();
 
+  /* Re-merge the basic blocks we split, and clean up the mess introduced by
+     insert_insn_on_edge.  */
+  cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
+  if (rtl_dump_file)
+    dump_flow_info (rtl_dump_file);
+
   free (edge_infos);
   free_edge_list (el);
 }
diff --git a/gcc/reload1.c b/gcc/reload1.c
index ced823b0c1b5..09ef4ef99779 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -461,6 +461,7 @@ static void failed_reload PARAMS ((rtx, int));
 static int set_reload_reg PARAMS ((int, int));
 static void reload_cse_delete_noop_set PARAMS ((rtx, rtx));
 static void reload_cse_simplify PARAMS ((rtx));
+static void fixup_abnormal_edges PARAMS ((void));
 extern void dump_needs PARAMS ((struct insn_chain *));
 
 /* Initialize the reload pass once per compilation. */
@@ -1269,6 +1270,7 @@ reload (first, global)
   /* Free all the insn_chain structures at once. */
   obstack_free (&reload_obstack, reload_startobj);
   unused_insn_chains = 0;
+  fixup_abnormal_edges ();
 
   return failure;
 }
@@ -9470,3 +9472,56 @@ copy_eh_notes (insn, x)
     }
 }
 
+/* The reload pass sometimes emits instructions after an abnormal call,
+   moving the basic block's end, when it really wants to emit them on the
+   outgoing edge.  Look for abnormal call edges, search backward for the
+   proper call, and repair the damage.
+
+   Handle instructions that throw exceptions internally the same way.  */
+static void
+fixup_abnormal_edges ()
+{
+  int i;
+  bool inserted = false;
+
+  for (i = 0; i < n_basic_blocks; i++)
+    {
+      basic_block bb = BASIC_BLOCK (i);
+      edge e;
+
+      /* Look for the cases we are interested in - calls or instructions
+	 causing exceptions.  */
+      for (e = bb->succ; e; e = e->succ_next)
+	{
+	  if (e->flags & EDGE_ABNORMAL_CALL)
+	    break;
+	  if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
+	      == (EDGE_ABNORMAL | EDGE_EH))
+	    break;
+	}
+      if (e && GET_CODE (bb->end) != CALL_INSN && !can_throw_internal (bb->end))
+	{
+	  rtx insn = bb->end;
+	  rtx next;
+	  for (e = bb->succ; e; e = e->succ_next)
+	    if (e->flags & EDGE_FALLTHRU)
+	      break;
+	  while (GET_CODE (insn) == INSN && !can_throw_internal (insn))
+	    insn = PREV_INSN (insn);
+	  if (GET_CODE (insn) != CALL_INSN && !can_throw_internal (insn))
+	    abort ();
+	  bb->end = insn;
+	  inserted = true;
+	  insn = NEXT_INSN (insn);
+	  while (insn && GET_CODE (insn) == INSN)
+	    {
+	      next = NEXT_INSN (insn);
+	      insert_insn_on_edge (PATTERN (insn), e);
+	      flow_delete_insn (insn);
+	      insn = next;
+	    }
+	}
+    }
+  if (inserted)
+    commit_edge_insertions ();
+}
-- 
2.43.5
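
For readers following the flow.c change, the heart of it is the new predicate
need_fake_edge_p: a block is split and given a fake edge to the exit block
after any instruction that may leave the function before the rest of the
block executes - a call that is not a sibling call, not marked noreturn, and
not a const call, or a piece of volatile inline assembly.  The standalone
sketch below is plain C, not GCC code; the toy_insn structure and its flags
are hypothetical stand-ins for the RTL tests (GET_CODE, SIBLING_CALL_P, the
REG_NORETURN note, CONST_CALL_P, MEM_VOLATILE_P) used in the patch, and it
only illustrates the decision the predicate encodes.

/* A minimal, standalone model of the decision need_fake_edge_p encodes.
   This is NOT GCC code: struct toy_insn and its fields are hypothetical
   stand-ins for the RTL tests used in the patch above.  */

#include <stdbool.h>
#include <stdio.h>

enum toy_kind { TOY_CALL, TOY_ASM, TOY_OTHER };

struct toy_insn
{
  enum toy_kind kind;
  bool sibling_call;   /* tail call; control does not return here      */
  bool noreturn_call;  /* e.g. a call to abort () or exit ()           */
  bool const_call;     /* const call: no side effects, always returns  */
  bool volatile_asm;   /* asm volatile (...) statement                 */
};

/* Mirror of the policy: fake edges are wanted for calls that may fail
   to return control to the following insn, and for volatile asm.  */
static bool
toy_need_fake_edge_p (const struct toy_insn *insn)
{
  if (insn->kind == TOY_CALL)
    return !insn->sibling_call && !insn->noreturn_call && !insn->const_call;

  return insn->kind == TOY_ASM && insn->volatile_asm;
}

int
main (void)
{
  static const struct toy_insn tests[] = {
    { TOY_CALL,  false, false, false, false },  /* ordinary call: edge     */
    { TOY_CALL,  true,  false, false, false },  /* sibling call:  no edge  */
    { TOY_CALL,  false, true,  false, false },  /* noreturn call: no edge  */
    { TOY_CALL,  false, false, true,  false },  /* const call:    no edge  */
    { TOY_ASM,   false, false, false, true  },  /* volatile asm:  edge     */
    { TOY_OTHER, false, false, false, false },  /* plain insn:    no edge  */
  };
  unsigned int i;

  for (i = 0; i < sizeof tests / sizeof tests[0]; i++)
    printf ("insn %u: fake edge %s\n", i,
            toy_need_fake_edge_p (&tests[i]) ? "needed" : "not needed");
  return 0;
}

Compiling and running the sketch prints which of the six sample instructions
would get a fake edge under that policy.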