This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

[csl-arm] Armv7M interrupt routines


Armv7M does not use the conventional Arm system/exception model. The exception 
model is designed so that interrupt service routines can be coded directly in 
C. However, ISR functions may be called with a 4-byte aligned stack pointer. 
The EABI requires 8-byte SP alignment at public function boundaries. 

The attached patch makes functions annotated with __attribute__((interrupt)) 
dynamically enforce proper stack alignment.

Tested with cross to arm-none-eabi.
Applied to branches/csl/arm-4_1

Paul

2006-03-01  Paul Brook  <paul@codesourcery.com>

	* doc/extend.texi: Document ARMv7 interrupt attribute.
	* config/arm/arm.c (arm_isr_value): Check arm_arch_notm.
	(use_return_insn): Return 0 if SP needs realignment.
	(arm_function_ok_for_sibcall): Ditto.
	(arm_output_function_prologue): Add IS_STACKALIGN comment.
	(arm_output_epilogue): Restore SP from stack if it needed realignment.
	(arm_expand_prologue): Add dynamic stack alignment code.
	(arm_unwind_emit_stm): Rename...
	(arm_unwind_emit_sequence): ... to this.  Handle popping SP.
	* config/arm/arm.h (ARM_FT_STACKALIGN, IS_STACKALIGN): Define.
	* g++.dg/other/armv7m-1.c: New test.
Index: gcc/doc/extend.texi
===================================================================
--- gcc/doc/extend.texi	(revision 111572)
+++ gcc/doc/extend.texi	(working copy)
@@ -1906,6 +1906,9 @@ void f () __attribute__ ((interrupt ("IR
 
 Permissible values for this parameter are: IRQ, FIQ, SWI, ABORT and UNDEF@.
 
+On ARMv7M the interrupt type is ignored, and the attribute means the function
+may be called with a word aligned stack pointer.
+
 @item interrupt_handler
 @cindex interrupt handler functions on the Blackfin, m68k, H8/300 and SH processors
 Use this attribute on the Blackfin, m68k, H8/300, H8/300H, H8S, and SH to
Index: gcc/testsuite/g++.dg/other/armv7m-1.c
===================================================================
--- gcc/testsuite/g++.dg/other/armv7m-1.c	(revision 0)
+++ gcc/testsuite/g++.dg/other/armv7m-1.c	(revision 0)
@@ -0,0 +1,69 @@
+/* { dg-do run { target arm*-*-* } } */
+/* Test Armv7m interrupt routines.  */
+#include <stdlib.h>
+
+#ifdef __ARM_ARCH_7M__
+void __attribute__((interrupt))
+foo(void)
+{
+  long long n;
+  long p;
+  asm volatile ("" : "=r" (p) : "0" (&n));
+  if (p & 4)
+    abort ();
+  return;
+}
+
+void __attribute__((interrupt))
+bar(void)
+{
+  throw 42;
+}
+
+int main()
+{
+  int a;
+  int before;
+  int after;
+  volatile register int sp asm("sp");
+
+  asm volatile ("mov %0, sp\n"
+		"blx %2\n"
+		"mov %1, sp\n"
+		: "=&r" (before), "=r" (after) : "r" (foo)
+		: "memory", "cc", "r0", "r1", "r2", "r3", "ip", "lr");
+  if (before != after)
+    abort();
+  asm volatile ("mov %0, sp\n"
+		"sub sp, sp, #4\n"
+		"blx %2\n"
+		"add sp, sp, #4\n"
+		"mov %1, sp\n"
+		: "=&r" (before), "=r" (after) : "r" (foo)
+		: "memory", "cc", "r0", "r1", "r2", "r3", "ip", "lr");
+  if (before != after)
+    abort();
+  before = sp;
+  try
+    {
+      bar();
+    }
+  catch (int i)
+    {
+      if (i != 42)
+	abort();
+    }
+  catch (...)
+    {
+      abort();
+    }
+  if (before != sp)
+    abort();
+  exit(0);
+}
+#else
+int main()
+{
+  exit (0);
+}
+#endif
Index: gcc/config/arm/arm.c
===================================================================
--- gcc/config/arm/arm.c	(revision 111572)
+++ gcc/config/arm/arm.c	(working copy)
@@ -1419,6 +1419,9 @@ arm_isr_value (tree argument)
   const isr_attribute_arg * ptr;
   const char *              arg;
 
+  if (!arm_arch_notm)
+    return ARM_FT_NORMAL | ARM_FT_STACKALIGN;
+
   /* No argument - default to IRQ.  */
   if (argument == NULL_TREE)
     return ARM_FT_ISR;
@@ -1512,9 +1515,9 @@ use_return_insn (int iscond, rtx sibling
 
   func_type = arm_current_func_type ();
 
-  /* Naked functions and volatile functions need special
+  /* Naked, volatile and stack alignment functions need special
      consideration.  */
-  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
+  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED | ARM_FT_STACKALIGN))
     return 0;
 
   /* So do interrupt functions that use the frame pointer.  */
@@ -3216,6 +3219,7 @@ static bool
 arm_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
 {
   int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
+  unsigned long func_type;
 
   if (cfun->machine->sibcall_blocked)
     return false;
@@ -3243,8 +3247,13 @@ arm_function_ok_for_sibcall (tree decl, 
   if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
     return false;
 
+  func_type = arm_current_func_type ();
   /* Never tailcall from an ISR routine - it needs a special exit sequence.  */
-  if (IS_INTERRUPT (arm_current_func_type ()))
+  if (IS_INTERRUPT (func_type))
+    return false;
+
+  /* Never tailcall if function may be called with a misaligned SP.  */
+  if (IS_STACKALIGN (func_type))
     return false;
 
   /* Everything else is ok.  */
@@ -10018,6 +10027,8 @@ arm_output_function_prologue (FILE *f, H
 
   if (IS_NESTED (func_type))
     asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
+  if (IS_STACKALIGN (func_type))
+    asm_fprintf (f, "\t%@ Stack Align: May be called with mis-aligned SP.\n");
 
   asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %wd\n",
 	       current_function_args_size,
@@ -10334,6 +10345,7 @@ arm_output_epilogue (rtx sibling)
 
       /* If we can, restore the LR into the PC.  */
       if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
+	  && !IS_STACKALIGN (func_type)
 	  && really_return
 	  && current_function_pretend_args_size == 0
 	  && saved_regs_mask & (1 << LR_REGNUM)
@@ -10344,8 +10356,9 @@ arm_output_epilogue (rtx sibling)
 	}
 
       /* Load the registers off the stack.  If we only have one register
-	 to load use the LDR instruction - it is faster.  */
-      if (saved_regs_mask == (1 << LR_REGNUM))
+	 to load use the LDR instruction - it is faster.  For Thumb-2
+	 always use pop and the assembler will pick the best instruction.*/
+      if (TARGET_ARM && saved_regs_mask == (1 << LR_REGNUM))
 	{
 	  asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
 	}
@@ -10398,6 +10411,12 @@ arm_output_epilogue (rtx sibling)
       break;
 
     default:
+      if (IS_STACKALIGN (func_type))
+	{
+	  /* See comment in arm_expand_prologue.  */
+	  asm_fprintf (f, "\tpop\t{%r}\n", IP_REGNUM);
+	  asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, IP_REGNUM);
+	}
       if (arm_arch5 || arm_arch4t)
 	asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
       else
@@ -11058,6 +11077,48 @@ arm_expand_prologue (void)
 
   ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
 
+  if (IS_STACKALIGN (func_type))
+    {
+      rtx dwarf;
+      /* Handle a word-aligned stack pointer.  We generate the following:
+
+	  mov ip, sp
+	  push {ip}
+	  push {ip}
+	  sub ip, ip, #4
+	  bic ip, ip, #4
+	  mov sp, ip
+	  <Normal prologue+body+epilogue, leaving return address in LR>
+	  pop {ip}
+	  mov sp, ip
+	  bx lr
+
+	 Generating accurate unwind information for this is tricky.  Instead
+	 we just tell the unwinder that the first instruction pushes
+	 sp onto the stack.  */
+      gcc_assert (TARGET_THUMB2 && !arm_arch_notm && args_to_push == 0);
+
+      dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
+      XVECEXP (dwarf, 0, 0) = 
+	gen_rtx_SET (VOIDmode, gen_rtx_MEM (SImode,
+		     gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (-4))),
+		     stack_pointer_rtx);
+      XVECEXP (dwarf, 0, 1) = 
+	gen_rtx_SET (VOIDmode, stack_pointer_rtx,
+		     gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT(4)));
+
+      insn = gen_movsi (ip_rtx, stack_pointer_rtx);
+      RTX_FRAME_RELATED_P (insn) = 1;
+      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
+					    dwarf, REG_NOTES (insn));
+      emit_insn (insn);
+      insn = emit_multi_reg_push (1 << IP_REGNUM);
+      insn = emit_multi_reg_push (1 << IP_REGNUM);
+      emit_insn (gen_addsi3 (ip_rtx, ip_rtx, GEN_INT (-4)));
+      emit_insn (gen_andsi3 (ip_rtx, ip_rtx, GEN_INT (~(HOST_WIDE_INT)4)));
+      emit_insn (gen_movsi (stack_pointer_rtx, ip_rtx));
+    }
+
   if (frame_pointer_needed && TARGET_ARM)
     {
       if (IS_INTERRUPT (func_type))
@@ -15768,12 +15829,13 @@ arm_dbx_register_number (unsigned int re
 
 
 #ifdef TARGET_UNWIND_INFO
-/* Emit unwind directives for a store-multiple instruction.  This should
-   only ever be generated by the function prologue code, so we expect it
-   to have a particular form.  */
+/* Emit unwind directives for a store-multiple instruction or stack pointer
+   push during alignment.
+   These should only ever be generated by the function prologue code, so
+   expect them to have a particular form.  */
 
 static void
-arm_unwind_emit_stm (FILE * asm_out_file, rtx p)
+arm_unwind_emit_sequence (FILE * asm_out_file, rtx p)
 {
   int i;
   HOST_WIDE_INT offset;
@@ -15783,8 +15845,19 @@ arm_unwind_emit_stm (FILE * asm_out_file
   unsigned lastreg;
   rtx e;
 
-  /* First insn will adjust the stack pointer.  */
   e = XVECEXP (p, 0, 0);
+  if (GET_CODE (e) != SET)
+    abort ();
+
+  if (GET_CODE (XEXP (e, 0)) == MEM)
+    {
+      /* Stack pointer push to align SP.  */
+      gcc_assert (rtx_equal_p (XEXP (e, 1), stack_pointer_rtx));
+      asm_fprintf (asm_out_file, "\t.save {%r}\n", SP_REGNUM);
+      return;
+    }
+  
+  /* First insn will adjust the stack pointer.  */
   if (GET_CODE (e) != SET
       || GET_CODE (XEXP (e, 0)) != REG
       || REGNO (XEXP (e, 0)) != SP_REGNUM
@@ -15983,7 +16056,7 @@ arm_unwind_emit (FILE * asm_out_file, rt
 
     case SEQUENCE:
       /* Store multiple.  */
-      arm_unwind_emit_stm (asm_out_file, pat);
+      arm_unwind_emit_sequence (asm_out_file, pat);
       break;
 
     default:
Index: gcc/config/arm/arm.h
===================================================================
--- gcc/config/arm/arm.h	(revision 111572)
+++ gcc/config/arm/arm.h	(working copy)
@@ -1514,6 +1514,7 @@ do {									      \
 #define ARM_FT_NAKED		(1 << 3) /* No prologue or epilogue.  */
 #define ARM_FT_VOLATILE		(1 << 4) /* Does not return.  */
 #define ARM_FT_NESTED		(1 << 5) /* Embedded inside another func.  */
+#define ARM_FT_STACKALIGN	(1 << 6) /* Called with misaligned stack.  */
 
 /* Some macros to test these flags.  */
 #define ARM_FUNC_TYPE(t)	(t & ARM_FT_TYPE_MASK)
@@ -1521,6 +1522,7 @@ do {									      \
 #define IS_VOLATILE(t)     	(t & ARM_FT_VOLATILE)
 #define IS_NAKED(t)        	(t & ARM_FT_NAKED)
 #define IS_NESTED(t)       	(t & ARM_FT_NESTED)
+#define IS_STACKALIGN(t)       	(t & ARM_FT_STACKALIGN)
 
 
 /* Structure used to hold the function stack frame layout.  Offsets are

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]