
i386/prologues: ROP mitigation for normal function epilogues


This is another step to flesh out -mmitigate-rop for i386. The basic idea was (I think) Richard Henderson's: if we can arrange to have every return preceded by a leave instruction, it becomes harder to construct an attack, since it takes away a certain amount of control over the stack pointer. I extended this to move the leave/ret pair to libgcc, preceded by a sequence of nops, so as to take away the possibility of jumping into the middle of an instruction preceding the leave/ret pair and thereby skipping the leave.
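
To illustrate the intended code shape (a sketch, not actual compiler output): an epilogue that would normally be emitted inline as

	leave
	ret

is instead emitted as

	jmp	__rop_ret

where __rop_ret is the leave/ret pair in libgcc's new ropret.S (below), preceded by a pad of nops so that a jump landing just before it cannot be decoding the tail of some longer instruction and thereby reach the ret without executing the leave.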

Outside of the i386 changes, this adds a new optional prologue component that is always placed at function entry. There's already a use for this in the static-chain-on-stack functionality.
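
For example (again a sketch rather than compiler output), with -mmitigate-rop the entry prologue ensures every function begins with the frame setup

	push	%rbp
	mov	%rsp, %rbp

(%ebp/%esp in 32-bit code) even when shrink-wrapping would otherwise sink the rest of the prologue past the entry block; the static-chain-on-stack push, when needed, is likewise emitted from here so that the trampoline can still skip the function's first insn.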

This has survived bootstrap and regression testing, both normally and with flag_mitigate_rop forced on in ix86_option_override. The former is completely clean. In the latter case, all sorts of scan-assembler testcases fail, but that is to be expected. There's also some effect on guality, but other than that everything seems to be working. These tests were run with a slightly earlier version that was missing the have_entry_prologue test in function.c; I will retest with this one as well.

This has a performance impact when -mmitigate-rop is enabled; from measurements I made a while ago, it looks like about twice the impact of -fno-omit-frame-pointer.


Bernd

gcc/
	* config/i386/i386-protos.h (ix86_expand_entry_prologue): Declare.
	* config/i386/i386.c (ix86_frame_pointer_required): True if
	flag_mitigate_rop.
	(ix86_compute_frame_layout): Determine whether to use ROP returns,
	and adjust save_regs_using_mov for it.
	(ix86_expand_entry_prologue): New function.
	(ix86_expand_prologue): Move parts from here into it.  Deal with
	the ROP return variant.
	(ix86_expand_epilogue): Deal with the ROP return variant.
	(ix86_expand_call): For sibcalls with flag_mitigate_rop, show a
	clobber and use of the hard frame pointer.
	(ix86_output_call_insn): For sibling calls, if using ROP returns,
	emit a leave.
	(ix86_pad_returns): Skip if using ROP returns.
	* config/i386/i386.h (struct machine_function): New field
	use_rop_ret.
	* config/i386/i386.md (sibcall peepholes): Disallow loads from
	memory locations involving the hard frame pointer.
	(return): Explicitly call gen_simple_return.
	(simple_return): Generate simple_return_leave_internal<mode> if
	necessary.
	(simple_return_internal): Assert we're not using ROP returns.
	(simple_return_leave_internal<mode>): New pattern.
	(entry_prologue): New pattern.
	* function.c (make_entry_prologue_seq): New static function.
	(thread_prologue_and_epilogue_insns): Call it and emit the
	sequence.
	* target-insns.def (entry_prologue): Add.

libgcc/
	* config/i386/t-linux (LIB2ADD_ST): New, to add ropret.S.
	* config/i386/ropret.S: New file.

Index: gcc/config/i386/i386-protos.h
===================================================================
--- gcc/config/i386/i386-protos.h	(revision 237310)
+++ gcc/config/i386/i386-protos.h	(working copy)
@@ -33,6 +33,7 @@ extern void ix86_expand_prologue (void);
 extern void ix86_maybe_emit_epilogue_vzeroupper (void);
 extern void ix86_expand_epilogue (int);
 extern void ix86_expand_split_stack_prologue (void);
+extern void ix86_expand_entry_prologue (void);
 
 extern void ix86_output_addr_vec_elt (FILE *, int);
 extern void ix86_output_addr_diff_elt (FILE *, int, int);
Index: gcc/config/i386/i386.c
===================================================================
--- gcc/config/i386/i386.c	(revision 237310)
+++ gcc/config/i386/i386.c	(working copy)
@@ -11529,6 +11530,9 @@ ix86_can_use_return_insn_p (void)
 static bool
 ix86_frame_pointer_required (void)
 {
+  if (flag_mitigate_rop)
+    return true;
+
   /* If we accessed previous frames, then the generated code expects
      to be able to access the saved ebp value in our frame.  */
   if (cfun->machine->accesses_prev_frame)
@@ -12102,11 +12106,21 @@ ix86_compute_frame_layout (struct ix86_f
 	   = !expensive_function_p (count);
     }
 
-  frame->save_regs_using_mov
-    = (TARGET_PROLOGUE_USING_MOVE && cfun->machine->use_fast_prologue_epilogue
-       /* If static stack checking is enabled and done with probes,
-	  the registers need to be saved before allocating the frame.  */
-       && flag_stack_check != STATIC_BUILTIN_STACK_CHECK);
+  cfun->machine->use_rop_ret = (flag_mitigate_rop
+				&& !TARGET_SEH
+				&& !stack_realign_drap
+				&& crtl->args.pops_args == 0
+				&& !crtl->calls_eh_return
+				&& !ix86_static_chain_on_stack);
+
+  if (cfun->machine->use_rop_ret)
+    frame->save_regs_using_mov = true;
+  else
+    frame->save_regs_using_mov
+      = (TARGET_PROLOGUE_USING_MOVE && cfun->machine->use_fast_prologue_epilogue
+	 /* If static stack checking is enabled and done with probes,
+	    the registers need to be saved before allocating the frame.  */
+	 && flag_stack_check != STATIC_BUILTIN_STACK_CHECK);
 
   /* Skip return address.  */
   offset = UNITS_PER_WORD;
@@ -13309,6 +13323,45 @@ ix86_elim_entry_set_got (rtx reg)
     }
 }
 
+/* Expand the parts of the prologue that must always be present at
+   function start.  */
+
+void
+ix86_expand_entry_prologue (void)
+{
+  struct ix86_frame frame;
+  struct machine_function *m = cfun->machine;
+
+  ix86_finalize_stack_realign_flags ();
+
+  /* The first insn of a function that accepts its static chain on the
+     stack is to push the register that would be filled in by a direct
+     call.  This insn will be skipped by the trampoline.  */
+  if (ix86_static_chain_on_stack)
+    {
+      rtx static_chain = ix86_static_chain (cfun->decl, false);
+      rtx_insn *insn = emit_insn (gen_push (static_chain));
+      emit_insn (gen_blockage ());
+
+      /* We don't want to interpret this push insn as a register save,
+	 only as a stack adjustment.  The real copy of the register as
+	 a save will be done later, if needed.  */
+      rtx t = plus_constant (Pmode, stack_pointer_rtx, -UNITS_PER_WORD);
+      t = gen_rtx_SET (stack_pointer_rtx, t);
+      add_reg_note (insn, REG_CFA_ADJUST_CFA, t);
+      RTX_FRAME_RELATED_P (insn) = 1;
+    }
+
+  ix86_compute_frame_layout (&frame);
+  if (m->use_rop_ret)
+    {
+      rtx_insn *insn = emit_insn (gen_push (hard_frame_pointer_rtx));
+      RTX_FRAME_RELATED_P (insn) = 1;
+      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
+      RTX_FRAME_RELATED_P (insn) = 1;
+    }
+}
+
 /* Expand the prologue into a bunch of separate insns.  */
 
 void
@@ -13320,7 +13373,6 @@ ix86_expand_prologue (void)
   HOST_WIDE_INT allocate;
   bool int_registers_saved;
   bool sse_registers_saved;
-  rtx static_chain = NULL_RTX;
 
   ix86_finalize_stack_realign_flags ();
 
@@ -13341,6 +13393,13 @@ ix86_expand_prologue (void)
 
   ix86_compute_frame_layout (&frame);
 
+  rtx static_chain = NULL_RTX;
+  if (ix86_static_chain_on_stack)
+    static_chain = ix86_static_chain (cfun->decl, false);
+  int factor = !!ix86_static_chain_on_stack + !!m->use_rop_ret;
+  m->fs.sp_offset += factor * UNITS_PER_WORD;
+  m->fs.cfa_offset += factor * UNITS_PER_WORD;
+
   if (!TARGET_64BIT && ix86_function_ms_hook_prologue (current_function_decl))
     {
       /* We should have already generated an error for any use of
@@ -13414,24 +13473,6 @@ ix86_expand_prologue (void)
 	}
     }
 
-  /* The first insn of a function that accepts its static chain on the
-     stack is to push the register that would be filled in by a direct
-     call.  This insn will be skipped by the trampoline.  */
-  else if (ix86_static_chain_on_stack)
-    {
-      static_chain = ix86_static_chain (cfun->decl, false);
-      insn = emit_insn (gen_push (static_chain));
-      emit_insn (gen_blockage ());
-
-      /* We don't want to interpret this push insn as a register save,
-	 only as a stack adjustment.  The real copy of the register as
-	 a save will be done later, if needed.  */
-      t = plus_constant (Pmode, stack_pointer_rtx, -UNITS_PER_WORD);
-      t = gen_rtx_SET (stack_pointer_rtx, t);
-      add_reg_note (insn, REG_CFA_ADJUST_CFA, t);
-      RTX_FRAME_RELATED_P (insn) = 1;
-    }
-
   /* Emit prologue code to adjust stack alignment and setup DRAP, in case
      of DRAP is needed and stack realignment is really needed after reload */
   if (stack_realign_drap)
@@ -13494,10 +13535,11 @@ ix86_expand_prologue (void)
 
   if (frame_pointer_needed && !m->fs.fp_valid)
     {
-      /* Note: AT&T enter does NOT have reversed args.  Enter is probably
-         slower on all targets.  Also sdb doesn't like it.  */
-      insn = emit_insn (gen_push (hard_frame_pointer_rtx));
-      RTX_FRAME_RELATED_P (insn) = 1;
+      if (!m->use_rop_ret)
+	{
+	  insn = emit_insn (gen_push (hard_frame_pointer_rtx));
+	  RTX_FRAME_RELATED_P (insn) = 1;
+	}
 
       /* Push registers now, before setting the frame pointer
 	 on SEH target.  */
@@ -13512,8 +13554,12 @@ ix86_expand_prologue (void)
 
       if (m->fs.sp_offset == frame.hard_frame_pointer_offset)
 	{
-	  insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
-	  RTX_FRAME_RELATED_P (insn) = 1;
+	  if (!m->use_rop_ret)
+	    {
+	      insn = emit_move_insn (hard_frame_pointer_rtx,
+				     stack_pointer_rtx);
+	      RTX_FRAME_RELATED_P (insn) = 1;
+	    }
 
 	  if (m->fs.cfa_reg == stack_pointer_rtx)
 	    m->fs.cfa_reg = hard_frame_pointer_rtx;
@@ -14225,6 +14271,9 @@ ix86_expand_epilogue (int style)
      then do so now.  */
   if (m->fs.fp_valid)
     {
+      if (m->use_rop_ret)
+	goto skip_validity_checks;
+
       /* If the stack pointer is valid and pointing at the frame
 	 pointer store address, then we only need a pop.  */
       if (m->fs.sp_valid && m->fs.sp_offset == frame.hfp_save_offset)
@@ -14290,7 +14339,10 @@ ix86_expand_epilogue (int style)
 				 style, true);
     }
   else
-    ix86_add_queued_cfa_restore_notes (get_last_insn ());
+    {
+    skip_validity_checks:
+      ix86_add_queued_cfa_restore_notes (get_last_insn ());
+    }
 
   /* Sibcall epilogues don't want a return instruction.  */
   if (style == 0)
@@ -14344,6 +14396,13 @@ ix86_expand_epilogue (int style)
       else
 	emit_jump_insn (gen_simple_return_pop_internal (popc));
     }
+  else if (m->use_rop_ret)
+    {
+      if (Pmode == DImode)
+	emit_jump_insn (gen_simple_return_leave_internaldi ());
+      else
+	emit_jump_insn (gen_simple_return_leave_internalsi ());
+    }
   else
     emit_jump_insn (gen_simple_return_internal ());
 
@@ -27870,7 +27929,11 @@ ix86_expand_call (rtx retval, rtx fnaddr
 	  clobber_reg (&use, gen_rtx_REG (mode, regno));
 	}
     }
-
+  if (flag_mitigate_rop && sibcall)
+    {
+      clobber_reg (&use, hard_frame_pointer_rtx);
+      use_reg (&use, hard_frame_pointer_rtx);
+    }
   if (vec_len > 1)
     call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (vec_len, vec));
   call = emit_call_insn (call);
@@ -27916,6 +27979,9 @@ ix86_output_call_insn (rtx_insn *insn, r
 
   if (SIBLING_CALL_P (insn))
     {
+      if (cfun->machine->use_rop_ret)
+	output_asm_insn ("leave", &call_op);
+
       if (direct_p)
 	{
 	  if (ix86_nopic_noplt_attribute_p (call_op))
@@ -45813,6 +45879,10 @@ ix86_pad_returns (void)
   edge e;
   edge_iterator ei;
 
+  if (cfun->machine->use_rop_ret)
+    /* Returns go through __rop_ret in libgcc.  */
+    return;
+
   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
     {
       basic_block bb = e->src;
@@ -46234,7 +46304,7 @@ ix86_reorg (void)
 
   if (flag_mitigate_rop)
     ix86_mitigate_rop ();
-  
+
   if (TARGET_SEH && current_function_has_exception_handlers ())
     ix86_seh_fixup_eh_fallthru ();
 
Index: gcc/config/i386/i386.h
===================================================================
--- gcc/config/i386/i386.h	(revision 237310)
+++ gcc/config/i386/i386.h	(working copy)
@@ -2564,6 +2564,10 @@ struct GTY(()) machine_function {
      pass arguments and can be used for indirect sibcall.  */
   BOOL_BITFIELD arg_reg_available : 1;
 
+  /* If true, we use an enter/leave pair, where the leave is part of a
+     return stub in libgcc.  */
+  BOOL_BITFIELD use_rop_ret : 1;
+
   /* During prologue/epilogue generation, the current frame state.
      Otherwise, the frame state at the end of the prologue.  */
   struct machine_frame_state fs;
Index: gcc/config/i386/i386.md
===================================================================
--- gcc/config/i386/i386.md	(revision 237310)
+++ gcc/config/i386/i386.md	(working copy)
@@ -12000,7 +12000,8 @@ (define_peephole2
 	 (match_operand 3))]
   "!TARGET_X32 && SIBLING_CALL_P (peep2_next_insn (1))
    && !reg_mentioned_p (operands[0],
-			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (1)))"
+			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (1)))
+   && !reg_mentioned_p (hard_frame_pointer_rtx, operands[1])"
   [(parallel [(call (mem:QI (match_dup 1))
 		    (match_dup 3))
 	      (unspec [(const_int 0)] UNSPEC_PEEPSIB)])])
@@ -12013,7 +12014,8 @@ (define_peephole2
 	 (match_operand 3))]
   "!TARGET_X32 && SIBLING_CALL_P (peep2_next_insn (2))
    && !reg_mentioned_p (operands[0],
-			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (2)))"
+			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (2)))
+   && !reg_mentioned_p (hard_frame_pointer_rtx, operands[1])"
   [(unspec_volatile [(const_int 0)] UNSPECV_BLOCKAGE)
    (parallel [(call (mem:QI (match_dup 1))
 		    (match_dup 3))
@@ -12073,7 +12075,8 @@ (define_peephole2
 			    (match_operand:SI 4 "immediate_operand")))])]
   "!TARGET_64BIT && SIBLING_CALL_P (peep2_next_insn (1))
    && !reg_mentioned_p (operands[0],
-			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (1)))"
+			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (1)))
+   && !reg_mentioned_p (hard_frame_pointer_rtx, operands[1])"
   [(parallel [(call (mem:QI (match_dup 1))
 		    (match_dup 3))
 	      (set (reg:SI SP_REG)
@@ -12092,7 +12095,8 @@ (define_peephole2
 			    (match_operand:SI 4 "immediate_operand")))])]
   "!TARGET_64BIT && SIBLING_CALL_P (peep2_next_insn (2))
    && !reg_mentioned_p (operands[0],
-			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (2)))"
+			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (2)))
+   && !reg_mentioned_p (hard_frame_pointer_rtx, operands[1])"
   [(unspec_volatile [(const_int 0)] UNSPECV_BLOCKAGE)
    (parallel [(call (mem:QI (match_dup 1))
 		    (match_dup 3))
@@ -12200,7 +12204,8 @@ (define_peephole2
 		 (match_operand 3)))]
   "!TARGET_X32 && SIBLING_CALL_P (peep2_next_insn (1))
    && !reg_mentioned_p (operands[0],
-			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (1)))"
+			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (1)))
+   && !reg_mentioned_p (hard_frame_pointer_rtx, operands[1])"
   [(parallel [(set (match_dup 2)
 		   (call (mem:QI (match_dup 1))
 			 (match_dup 3)))
@@ -12215,7 +12220,8 @@ (define_peephole2
 	      (match_operand 3)))]
   "!TARGET_X32 && SIBLING_CALL_P (peep2_next_insn (2))
    && !reg_mentioned_p (operands[0],
-			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (2)))"
+			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (2)))
+   && !reg_mentioned_p (hard_frame_pointer_rtx, operands[1])"
   [(unspec_volatile [(const_int 0)] UNSPECV_BLOCKAGE)
    (parallel [(set (match_dup 2)
 		   (call (mem:QI (match_dup 1))
@@ -12281,7 +12287,8 @@ (define_peephole2
 			    (match_operand:SI 4 "immediate_operand")))])]
   "!TARGET_64BIT && SIBLING_CALL_P (peep2_next_insn (1))
    && !reg_mentioned_p (operands[0],
-			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (1)))"
+			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (1)))
+   && !reg_mentioned_p (hard_frame_pointer_rtx, operands[1])"
   [(parallel [(set (match_dup 2)
 		   (call (mem:QI (match_dup 1))
 			 (match_dup 3)))
@@ -12302,7 +12309,8 @@ (define_peephole2
 			    (match_operand:SI 4 "immediate_operand")))])]
   "!TARGET_64BIT && SIBLING_CALL_P (peep2_next_insn (2))
    && !reg_mentioned_p (operands[0],
-			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (2)))"
+			CALL_INSN_FUNCTION_USAGE (peep2_next_insn (2)))
+   && !reg_mentioned_p (hard_frame_pointer_rtx, operands[1])"
   [(unspec_volatile [(const_int 0)] UNSPECV_BLOCKAGE)
    (parallel [(set (match_dup 2)
 		   (call (mem:QI (match_dup 1))
@@ -12408,6 +12416,8 @@ (define_expand "return"
       emit_jump_insn (gen_simple_return_pop_internal (popc));
       DONE;
     }
+  emit_jump_insn (gen_simple_return ());
+  DONE;
 })
 
 ;; We need to disable this for TARGET_SEH, as otherwise
@@ -12428,18 +12438,42 @@ (define_expand "simple_return"
       emit_jump_insn (gen_simple_return_pop_internal (popc));
       DONE;
     }
+  if (cfun->machine->use_rop_ret)
+    {
+      if (Pmode == DImode)
+	emit_jump_insn (gen_simple_return_leave_internaldi ());
+      else
+	emit_jump_insn (gen_simple_return_leave_internalsi ());
+      DONE;
+    }
 })
 
 (define_insn "simple_return_internal"
   [(simple_return)]
   "reload_completed"
-  "%!ret"
+{
+  gcc_assert (!cfun->machine->use_rop_ret);
+  return "%!ret";
+}
   [(set_attr "length" "1")
    (set_attr "atom_unit" "jeu")
    (set_attr "length_immediate" "0")
    (set_attr "modrm" "0")
    (set_attr "maybe_prefix_bnd" "1")])
 
+(define_insn "simple_return_leave_internal<mode>"
+  [(simple_return)
+   (set (reg:P SP_REG) (reg:P BP_REG))
+   (set (reg:P BP_REG) (mem:P (reg:P BP_REG)))]
+  "reload_completed"
+{
+  gcc_assert (cfun->machine->use_rop_ret);
+  return "%!jmp\t__rop_ret";
+}
+  [(set_attr "type" "ibr")
+   (set_attr "memory" "none")
+   (set_attr "length" "5")])
+
 (define_insn "interrupt_return"
   [(simple_return)
    (unspec [(const_int 0)] UNSPEC_INTERRUPT_RETURN)]
@@ -12675,6 +12709,14 @@ (define_expand "split_stack_prologue"
   DONE;
 })
 
+(define_expand "entry_prologue"
+  [(const_int 0)]
+  ""
+{
+  ix86_expand_entry_prologue ();
+  DONE;
+})
+
 ;; In order to support the call/return predictor, we use a return
 ;; instruction which the middle-end doesn't see.
 (define_insn "split_stack_return"
Index: gcc/function.c
===================================================================
--- gcc/function.c	(revision 237310)
+++ gcc/function.c	(working copy)
@@ -5828,6 +5828,27 @@ make_prologue_seq (void)
   return seq;
 }
 
+/* Return a sequence to be used as the entry prologue for the current
+   function, or NULL.  */
+
+static rtx_insn *
+make_entry_prologue_seq (void)
+{
+  if (!targetm.have_entry_prologue ())
+    return NULL;
+
+  start_sequence ();
+  emit_insn (targetm.gen_entry_prologue ());
+  rtx_insn *seq = get_insns ();
+
+  /* Retain a map of the prologue insns.  */
+  record_insns (seq, NULL, &prologue_insn_hash);
+  end_sequence ();
+  set_insn_locations (seq, prologue_location);
+
+  return seq;
+}
+
 /* Return a sequence to be used as the epilogue for the current function,
    or NULL.  */
 
@@ -5920,6 +5938,7 @@ thread_prologue_and_epilogue_insns (void
   edge orig_entry_edge = entry_edge;
 
   rtx_insn *split_prologue_seq = make_split_prologue_seq ();
+  rtx_insn *entry_prologue_seq = make_entry_prologue_seq ();
   rtx_insn *prologue_seq = make_prologue_seq ();
   rtx_insn *epilogue_seq = make_epilogue_seq ();
 
@@ -5929,7 +5948,6 @@ thread_prologue_and_epilogue_insns (void
 
   try_shrink_wrapping (&entry_edge, prologue_seq);
 
-
   rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   /* A small fib -- epilogue is not yet completed, but we wish to re-use
@@ -6004,11 +6022,14 @@ thread_prologue_and_epilogue_insns (void
 
   rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
-  if (split_prologue_seq || prologue_seq)
+  if (split_prologue_seq || prologue_seq || entry_prologue_seq)
     {
       if (split_prologue_seq)
 	insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
 
+      if (entry_prologue_seq)
+	insert_insn_on_edge (entry_prologue_seq, orig_entry_edge);
+
       if (prologue_seq)
 	insert_insn_on_edge (prologue_seq, entry_edge);
 
Index: gcc/target-insns.def
===================================================================
--- gcc/target-insns.def	(revision 237310)
+++ gcc/target-insns.def	(working copy)
@@ -78,6 +78,7 @@ DEF_TARGET_INSN (restore_stack_block, (r
 DEF_TARGET_INSN (restore_stack_function, (rtx x0, rtx x1))
 DEF_TARGET_INSN (restore_stack_nonlocal, (rtx x0, rtx x1))
 DEF_TARGET_INSN (return, (void))
+DEF_TARGET_INSN (entry_prologue, (void))
 DEF_TARGET_INSN (save_stack_block, (rtx x0, rtx x1))
 DEF_TARGET_INSN (save_stack_function, (rtx x0, rtx x1))
 DEF_TARGET_INSN (save_stack_nonlocal, (rtx x0, rtx x1))
Index: libgcc/config/i386/t-linux
===================================================================
--- libgcc/config/i386/t-linux	(revision 237310)
+++ libgcc/config/i386/t-linux	(working copy)
@@ -4,3 +4,5 @@
 SHLIB_MAPFILES = libgcc-std.ver $(srcdir)/config/i386/libgcc-glibc.ver
 
 HOST_LIBGCC2_CFLAGS += -mlong-double-80 -DUSE_ELF_SYMVER
+
+LIB2ADD_ST += $(srcdir)/config/i386/ropret.S
Index: libgcc/config/i386/ropret.S
===================================================================
--- libgcc/config/i386/ropret.S	(revision 0)
+++ libgcc/config/i386/ropret.S	(working copy)
@@ -0,0 +1,67 @@
+# x86/x86_64 support for -mmitigate-rop.
+# Copyright (C) 2016 Free Software Foundation, Inc.
+
+# This file is part of GCC.
+
+# GCC is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free
+# Software Foundation; either version 3, or (at your option) any later
+# version.
+
+# GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+# for more details.
+
+# Under Section 7 of GPL version 3, you are granted additional
+# permissions described in the GCC Runtime Library Exception, version
+# 3.1, as published by the Free Software Foundation.
+
+# You should have received a copy of the GNU General Public License and
+# a copy of the GCC Runtime Library Exception along with this program;
+# see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
+# <http://www.gnu.org/licenses/>.
+
+__rop_ret_padding:
+	.cfi_startproc
+#ifndef __x86_64__
+	.cfi_def_cfa_offset 16
+	.cfi_offset %ebp, -16
+	.cfi_def_cfa_register %ebp
+#else
+	.cfi_def_cfa_offset 8
+	.cfi_offset %rbp, -8
+	.cfi_def_cfa_register %rbp
+#endif
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+	nop
+L1:
+	jmp	L1
+
+__rop_ret:
+	leave
+#ifndef __x86_64__
+	.cfi_restore 5
+	.cfi_def_cfa %esp, 4
+#else
+	.cfi_def_cfa %rsp, 8
+#endif
+	ret
+	.cfi_endproc
+	.size	__rop_ret_padding, . - __rop_ret_padding
+
+	.global	__rop_ret
