[committed] Use target-insns.def for prologue & epilogue insns

Richard Sandiford richard.sandiford@arm.com
Tue Jun 30 20:56:00 GMT 2015


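This patch moves the prologue, epilogue and sibcall_epilogue patterns
over to target-insns.def, so that conditional code can use the
targetm.have_* ()/targetm.gen_* () hooks instead of the #ifdef-guarded
HAVE_*/gen_* macro interface.  The idea being that a target which does
not define one of these patterns gets a have_* hook that returns false,
so the hand-written fallbacks (such as the gen_epilogue stub in
defaults.h) are no longer needed.

As a sketch of the idiom, the function.c hunk below turns:

    #ifdef HAVE_prologue
      if (HAVE_prologue)
        {
          rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
          ...
        }
    #endif

into:

      if (targetm.have_prologue ())
        {
          rtx_insn *seq = targetm.gen_prologue ();
          ...
        }

which compiles for all targets and needs no cast, since the hook
already returns an rtx_insn *.
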
Bootstrapped & regression-tested on x86_64-linux-gnu and aarch64-linux-gnu.
Also tested via config-list.mk.  Committed as preapproved.

Thanks,
Richard


gcc/
	* defaults.h (HAVE_epilogue, gen_epilogue): Delete.
	* target-insns.def (epilogue, prologue, sibcall_epilogue): New
	targetm instruction patterns.
	* alias.c (init_alias_analysis): Use them instead of HAVE_*/gen_*
	interface.
	* calls.c (expand_call): Likewise.
	* cfgrtl.c (cfg_layout_finalize): Likewise.
	* df-scan.c (df_get_entry_block_def_set): Likewise.
	(df_get_exit_block_use_set): Likewise.
	* dwarf2cfi.c (pass_dwarf2_frame::gate): Likewise.
	* final.c (final_start_function): Likewise.
	* function.c (thread_prologue_and_epilogue_insns): Likewise.
	(reposition_prologue_and_epilogue_notes): Likewise.
	* reorg.c (find_end_label): Likewise.
	* toplev.c (process_options): Likewise.

Index: gcc/defaults.h
===================================================================
--- gcc/defaults.h	2015-06-30 21:54:23.984536147 +0100
+++ gcc/defaults.h	2015-06-30 21:54:23.972536284 +0100
@@ -1426,16 +1426,6 @@ #define STACK_CHECK_MAX_VAR_SIZE (STACK_
 #define TARGET_VTABLE_USES_DESCRIPTORS 0
 #endif
 
-#ifndef HAVE_epilogue
-#define HAVE_epilogue 0
-static inline rtx
-gen_epilogue ()
-{
-  gcc_unreachable ();
-  return NULL;
-}
-#endif
-
 #ifndef HAVE_mem_thread_fence
 #define HAVE_mem_thread_fence 0
 static inline rtx
Index: gcc/target-insns.def
===================================================================
--- gcc/target-insns.def	2015-06-30 21:54:23.984536147 +0100
+++ gcc/target-insns.def	2015-06-30 21:54:23.976536238 +0100
@@ -30,6 +30,9 @@
    Patterns that take no operands should have a prototype "(void)".
 
    Instructions should be documented in md.texi rather than here.  */
+DEF_TARGET_INSN (canonicalize_funcptr_for_compare, (rtx x0, rtx x1))
+DEF_TARGET_INSN (epilogue, (void))
+DEF_TARGET_INSN (prologue, (void))
 DEF_TARGET_INSN (return, (void))
+DEF_TARGET_INSN (sibcall_epilogue, (void))
 DEF_TARGET_INSN (simple_return, (void))
-DEF_TARGET_INSN (canonicalize_funcptr_for_compare, (rtx x0, rtx x1))
Index: gcc/alias.c
===================================================================
--- gcc/alias.c	2015-06-30 21:54:23.984536147 +0100
+++ gcc/alias.c	2015-06-30 21:54:23.972536284 +0100
@@ -3038,6 +3038,14 @@ init_alias_analysis (void)
   rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
   rpo_cnt = pre_and_rev_post_order_compute (NULL, rpo, false);
 
+  /* The prologue/epilogue insns are not threaded onto the
+     insn chain until after reload has completed.  Thus,
+     there is no sense wasting time checking if INSN is in
+     the prologue/epilogue until after reload has completed.  */
+  bool could_be_prologue_epilogue = ((targetm.have_prologue ()
+				      || targetm.have_epilogue ())
+				     && reload_completed);
+
   pass = 0;
   do
     {
@@ -3076,17 +3084,7 @@ init_alias_analysis (void)
 		{
 		  rtx note, set;
 
-#if defined (HAVE_prologue)
-		  static const bool prologue = true;
-#else
-		  static const bool prologue = false;
-#endif
-
-		  /* The prologue/epilogue insns are not threaded onto the
-		     insn chain until after reload has completed.  Thus,
-		     there is no sense wasting time checking if INSN is in
-		     the prologue/epilogue until after reload has completed.  */
-		  if ((prologue || HAVE_epilogue) && reload_completed
+		  if (could_be_prologue_epilogue
 		      && prologue_epilogue_contains (insn))
 		    continue;
 
Index: gcc/calls.c
===================================================================
--- gcc/calls.c	2015-06-30 21:54:23.984536147 +0100
+++ gcc/calls.c	2015-06-30 21:54:23.972536284 +0100
@@ -2783,13 +2783,8 @@ expand_call (tree exp, rtx target, int i
     try_tail_call = 0;
 
   /*  Rest of purposes for tail call optimizations to fail.  */
-  if (
-#ifdef HAVE_sibcall_epilogue
-      !HAVE_sibcall_epilogue
-#else
-      1
-#endif
-      || !try_tail_call
+  if (!try_tail_call
+      || !targetm.have_sibcall_epilogue ()
       /* Doing sibling call optimization needs some work, since
 	 structure_value_addr can be allocated on the stack.
 	 It does not seem worth the effort since few optimizable
Index: gcc/cfgrtl.c
===================================================================
--- gcc/cfgrtl.c	2015-06-30 21:54:23.984536147 +0100
+++ gcc/cfgrtl.c	2015-06-30 21:54:23.972536284 +0100
@@ -4324,7 +4324,7 @@ cfg_layout_finalize (void)
 #endif
   force_one_exit_fallthru ();
   rtl_register_cfg_hooks ();
-  if (reload_completed && !HAVE_epilogue)
+  if (reload_completed && !targetm.have_epilogue ())
     fixup_fallthru_exit_predecessor ();
   fixup_reorder_chain ();
 
Index: gcc/df-scan.c
===================================================================
--- gcc/df-scan.c	2015-06-30 21:54:23.984536147 +0100
+++ gcc/df-scan.c	2015-06-30 21:54:23.976536238 +0100
@@ -52,13 +52,6 @@ Software Foundation; either version 3, o
 typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
 
 
-#ifndef HAVE_prologue
-#define HAVE_prologue 0
-#endif
-#ifndef HAVE_sibcall_epilogue
-#define HAVE_sibcall_epilogue 0
-#endif
-
 /* The set of hard registers in eliminables[i].from. */
 
 static HARD_REG_SET elim_reg_set;
@@ -3523,7 +3516,7 @@ df_get_entry_block_def_set (bitmap entry
 
   /* Once the prologue has been generated, all of these registers
      should just show up in the first regular block.  */
-  if (HAVE_prologue && epilogue_completed)
+  if (targetm.have_prologue () && epilogue_completed)
     {
       /* Defs for the callee saved registers are inserted so that the
 	 pushes have some defining location.  */
@@ -3701,7 +3694,7 @@ df_get_exit_block_use_set (bitmap exit_b
     if (global_regs[i] || EPILOGUE_USES (i))
       bitmap_set_bit (exit_block_uses, i);
 
-  if (HAVE_epilogue && epilogue_completed)
+  if (targetm.have_epilogue () && epilogue_completed)
     {
       /* Mark all call-saved registers that we actually used.  */
       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
@@ -3721,7 +3714,7 @@ df_get_exit_block_use_set (bitmap exit_b
       }
 
 #ifdef EH_RETURN_STACKADJ_RTX
-  if ((!HAVE_epilogue || ! epilogue_completed)
+  if ((!targetm.have_epilogue () || ! epilogue_completed)
       && crtl->calls_eh_return)
     {
       rtx tmp = EH_RETURN_STACKADJ_RTX;
@@ -3731,7 +3724,7 @@ df_get_exit_block_use_set (bitmap exit_b
 #endif
 
 #ifdef EH_RETURN_HANDLER_RTX
-  if ((!HAVE_epilogue || ! epilogue_completed)
+  if ((!targetm.have_epilogue () || ! epilogue_completed)
       && crtl->calls_eh_return)
     {
       rtx tmp = EH_RETURN_HANDLER_RTX;
Index: gcc/dwarf2cfi.c
===================================================================
--- gcc/dwarf2cfi.c	2015-06-30 21:54:23.984536147 +0100
+++ gcc/dwarf2cfi.c	2015-06-30 21:54:23.976536238 +0100
@@ -3476,11 +3476,10 @@ const pass_data pass_data_dwarf2_frame =
 bool
 pass_dwarf2_frame::gate (function *)
 {
-#ifndef HAVE_prologue
   /* Targets which still implement the prologue in assembler text
      cannot use the generic dwarf2 unwinding.  */
-  return false;
-#endif
+  if (!targetm.have_prologue ())
+    return false;
 
   /* ??? What to do for UI_TARGET unwinding?  They might be able to benefit
      from the optimized shrink-wrapping annotations that we will compute.
Index: gcc/final.c
===================================================================
--- gcc/final.c	2015-06-30 21:54:23.984536147 +0100
+++ gcc/final.c	2015-06-30 21:54:23.976536238 +0100
@@ -1803,12 +1803,8 @@ final_start_function (rtx_insn *first, F
      if the profiling code comes after the prologue.  */
   if (targetm.profile_before_prologue () && crtl->profile)
     {
-      if (targetm.asm_out.function_prologue
-	  == default_function_pro_epilogue
-#ifdef HAVE_prologue
-	  && HAVE_prologue
-#endif
-	 )
+      if (targetm.asm_out.function_prologue == default_function_pro_epilogue
+	  && targetm.have_prologue ())
 	{
 	  rtx_insn *insn;
 	  for (insn = first; insn; insn = NEXT_INSN (insn))
@@ -1864,9 +1860,7 @@ final_start_function (rtx_insn *first, F
 
   /* If the machine represents the prologue as RTL, the profiling code must
      be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
-#ifdef HAVE_prologue
-  if (! HAVE_prologue)
-#endif
+  if (! targetm.have_prologue ())
     profile_after_prologue (file);
 }
 
Index: gcc/function.c
===================================================================
--- gcc/function.c	2015-06-30 21:54:23.984536147 +0100
+++ gcc/function.c	2015-06-30 21:54:23.976536238 +0100
@@ -5864,11 +5864,10 @@ thread_prologue_and_epilogue_insns (void
     }
 
   prologue_seq = NULL;
-#ifdef HAVE_prologue
-  if (HAVE_prologue)
+  if (targetm.have_prologue ())
     {
       start_sequence ();
-      rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
+      rtx_insn *seq = targetm.gen_prologue ();
       emit_insn (seq);
 
       /* Insert an explicit USE for the frame pointer
@@ -5890,7 +5889,6 @@ thread_prologue_and_epilogue_insns (void
       end_sequence ();
       set_insn_locations (prologue_seq, prologue_location);
     }
-#endif
 
   bitmap_initialize (&bb_flags, &bitmap_default_obstack);
 
@@ -5995,11 +5993,11 @@ thread_prologue_and_epilogue_insns (void
   if (exit_fallthru_edge == NULL)
     goto epilogue_done;
 
-  if (HAVE_epilogue)
+  if (targetm.have_epilogue ())
     {
       start_sequence ();
       epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
-      rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
+      rtx_insn *seq = targetm.gen_epilogue ();
       if (seq)
 	emit_jump_insn (seq);
 
@@ -6070,7 +6068,6 @@ thread_prologue_and_epilogue_insns (void
     convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
 			      returnjump, unconverted_simple_returns);
 
-#ifdef HAVE_sibcall_epilogue
   /* Emit sibling epilogues before any sibling call sites.  */
   for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
 							     ei_safe_edge (ei));
@@ -6078,7 +6075,6 @@ thread_prologue_and_epilogue_insns (void
     {
       basic_block bb = e->src;
       rtx_insn *insn = BB_END (bb);
-      rtx ep_seq;
 
       if (!CALL_P (insn)
 	  || ! SIBLING_CALL_P (insn)
@@ -6090,8 +6086,7 @@ thread_prologue_and_epilogue_insns (void
 	  continue;
 	}
 
-      ep_seq = gen_sibcall_epilogue ();
-      if (ep_seq)
+      if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
 	{
 	  start_sequence ();
 	  emit_note (NOTE_INSN_EPILOGUE_BEG);
@@ -6109,7 +6104,6 @@ thread_prologue_and_epilogue_insns (void
 	}
       ei_next (&ei);
     }
-#endif
 
   if (epilogue_end)
     {
@@ -6143,10 +6137,10 @@ thread_prologue_and_epilogue_insns (void
 void
 reposition_prologue_and_epilogue_notes (void)
 {
-#if ! defined (HAVE_prologue) && ! defined (HAVE_sibcall_epilogue)
-  if (!HAVE_epilogue)
+  if (!targetm.have_prologue ()
+      && !targetm.have_epilogue ()
+      && !targetm.have_sibcall_epilogue ())
     return;
-#endif
 
   /* Since the hash table is created on demand, the fact that it is
      non-null is a signal that it is non-empty.  */
Index: gcc/reorg.c
===================================================================
--- gcc/reorg.c	2015-06-30 21:54:23.984536147 +0100
+++ gcc/reorg.c	2015-06-30 21:54:23.976536238 +0100
@@ -473,7 +473,7 @@ find_end_label (rtx kind)
 	}
       else
 	{
-	  if (HAVE_epilogue && ! targetm.have_return ())
+	  if (targetm.have_epilogue () && ! targetm.have_return ())
 	    /* The RETURN insn has its delay slot filled so we cannot
 	       emit the label just before it.  Since we already have
 	       an epilogue and cannot emit a new RETURN, we cannot
Index: gcc/toplev.c
===================================================================
--- gcc/toplev.c	2015-06-30 21:54:23.984536147 +0100
+++ gcc/toplev.c	2015-06-30 21:54:23.976536238 +0100
@@ -112,10 +112,6 @@ Software Foundation; either version 3, o
 				   declarations for e.g. AIX 4.x.  */
 #endif
 
-#ifndef HAVE_prologue
-#define HAVE_prologue 0
-#endif
-
 #include <new>
 
 static void general_init (const char *, bool);
@@ -1660,7 +1656,7 @@ process_options (void)
 
  /* Do not use IPA optimizations for register allocation if profiler is active
     or port does not emit prologue and epilogue as RTL.  */
-  if (profile_flag || !HAVE_prologue || !HAVE_epilogue)
+  if (profile_flag || !targetm.have_prologue () || !targetm.have_epilogue ())
     flag_ipa_ra = 0;
 
   /* Enable -Werror=coverage-mismatch when -Werror and -Wno-error


