Re: [PATCH] Avoid scheduler reordering trapping insns across function calls (PR rtl-optimization/41239)
On Mon, Sep 07, 2009 at 10:34:30AM -0700, Richard Henderson wrote:
> On 09/07/2009 10:24 AM, Jakub Jelinek wrote:
>> Do you mean something like this?
>
> Something like that. I'd prefer you used "noreturn" instead
> of "exit" throughout.
Ok, this passed bootstrap/regtest on x86_64-linux and i686-linux.
2009-09-07 Jakub Jelinek <jakub@redhat.com>
PR rtl-optimization/41239
* sched-int.h (struct deps): Add last_function_call_may_noreturn field.
* sched-rgn.c (deps_join): Join also last_function_call_may_noreturn
lists.
* sched-deps.c (sched_analyze_insn): Prevent moving trapping insns
across calls, as the calls might not always return normally.
(call_may_noreturn_p): New function.
(deps_analyze_insn): Update last_function_call_may_noreturn list.
(init_deps): Initialize it.
(remove_from_deps): Also remove calls from
last_function_call_may_noreturn list.
* gcc.c-torture/execute/pr41239.c: New test.
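To illustrate the kind of reordering the patch prevents, here is a
minimal sketch (not the PR testcase included below; the function names
are made up):

  extern void fail (void); /* may call exit or longjmp */

  long
  safe_div (long num, long den)
  {
    if (den == 0)
      fail (); /* might not return normally */
    /* The division below may trap; without a dependence on the call
       above, the scheduler could move it before fail ().  */
    return num / den;
  }

With the patch, sched_analyze_insn makes every insn for which
may_trap_p holds depend (REG_DEP_ANTI) on the calls recorded in
last_function_call_may_noreturn, so the division stays after the call.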
--- gcc/sched-rgn.c.jj 2009-09-03 14:02:04.000000000 +0200
+++ gcc/sched-rgn.c 2009-09-07 18:17:27.000000000 +0200
@@ -2645,6 +2645,11 @@ deps_join (struct deps *succ_deps, struc
= concat_INSN_LIST (pred_deps->last_function_call,
succ_deps->last_function_call);
+ /* last_function_call_may_noreturn is inherited by successor. */
+ succ_deps->last_function_call_may_noreturn
+ = concat_INSN_LIST (pred_deps->last_function_call_may_noreturn,
+ succ_deps->last_function_call_may_noreturn);
+
/* sched_before_next_call is inherited by successor. */
succ_deps->sched_before_next_call
= concat_INSN_LIST (pred_deps->sched_before_next_call,
--- gcc/sched-int.h.jj 2009-09-03 14:02:04.000000000 +0200
+++ gcc/sched-int.h 2009-09-07 18:16:51.000000000 +0200
@@ -502,6 +502,12 @@ struct deps
Used to prevent register lifetimes from expanding unnecessarily. */
rtx last_function_call;
+ /* A list of the last function calls we have seen that may not
+ return normally. We use a list to represent last function calls from
+ multiple predecessor blocks. Used to prevent moving trapping insns
+ across such calls. */
+ rtx last_function_call_may_noreturn;
+
/* A list of insns which use a pseudo register that does not already
cross a call. We create dependencies between each of those insn
and the next call insn, to ensure that they won't cross a call after
--- gcc/sched-deps.c.jj 2009-09-03 14:02:04.000000000 +0200
+++ gcc/sched-deps.c 2009-09-07 18:42:07.000000000 +0200
@@ -2593,6 +2593,12 @@ sched_analyze_insn (struct deps *deps, r
can_start_lhs_rhs_p = (NONJUMP_INSN_P (insn)
&& code == SET);
+ if (may_trap_p (x))
+ /* Avoid moving trapping instructions across function calls that might
+ not always return. */
+ add_dependence_list (insn, deps->last_function_call_may_noreturn,
+ 1, REG_DEP_ANTI);
+
if (code == COND_EXEC)
{
sched_analyze_2 (deps, COND_EXEC_TEST (x), insn);
@@ -3109,6 +3115,73 @@ sched_analyze_insn (struct deps *deps, r
}
}
+/* Return TRUE if INSN might not always return normally (e.g. it could
+ call exit or longjmp, or loop forever). */
+static bool
+call_may_noreturn_p (rtx insn)
+{
+ rtx call;
+
+ /* const or pure calls that aren't looping will always return. */
+ if (RTL_CONST_OR_PURE_CALL_P (insn)
+ && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))
+ return false;
+
+ call = PATTERN (insn);
+ if (GET_CODE (call) == PARALLEL)
+ call = XVECEXP (call, 0, 0);
+ if (GET_CODE (call) == SET)
+ call = SET_SRC (call);
+ if (GET_CODE (call) == CALL
+ && MEM_P (XEXP (call, 0))
+ && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
+ {
+ rtx symbol = XEXP (XEXP (call, 0), 0);
+ if (SYMBOL_REF_DECL (symbol)
+ && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
+ {
+ if (DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
+ == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol)))
+ {
+ case BUILT_IN_BCMP:
+ case BUILT_IN_BCOPY:
+ case BUILT_IN_BZERO:
+ case BUILT_IN_INDEX:
+ case BUILT_IN_MEMCHR:
+ case BUILT_IN_MEMCMP:
+ case BUILT_IN_MEMCPY:
+ case BUILT_IN_MEMMOVE:
+ case BUILT_IN_MEMPCPY:
+ case BUILT_IN_MEMSET:
+ case BUILT_IN_RINDEX:
+ case BUILT_IN_STPCPY:
+ case BUILT_IN_STPNCPY:
+ case BUILT_IN_STRCAT:
+ case BUILT_IN_STRCHR:
+ case BUILT_IN_STRCMP:
+ case BUILT_IN_STRCPY:
+ case BUILT_IN_STRCSPN:
+ case BUILT_IN_STRLEN:
+ case BUILT_IN_STRNCAT:
+ case BUILT_IN_STRNCMP:
+ case BUILT_IN_STRNCPY:
+ case BUILT_IN_STRPBRK:
+ case BUILT_IN_STRRCHR:
+ case BUILT_IN_STRSPN:
+ case BUILT_IN_STRSTR:
+ /* Assume certain string/memory builtins always return. */
+ return false;
+ default:
+ break;
+ }
+ }
+ }
+
+ /* For all other calls, assume that they might not always return. */
+ return true;
+}
+
/* Analyze INSN with DEPS as a context. */
void
deps_analyze_insn (struct deps *deps, rtx insn)
@@ -3207,7 +3280,16 @@ deps_analyze_insn (struct deps *deps, rt
/* Remember the last function call for limiting lifetimes. */
free_INSN_LIST_list (&deps->last_function_call);
deps->last_function_call = alloc_INSN_LIST (insn, NULL_RTX);
-
+
+ if (call_may_noreturn_p (insn))
+ {
+ /* Remember the last function call that might not always return
+ normally for limiting moves of trapping insns. */
+ free_INSN_LIST_list (&deps->last_function_call_may_noreturn);
+ deps->last_function_call_may_noreturn
+ = alloc_INSN_LIST (insn, NULL_RTX);
+ }
+
/* Before reload, begin a post-call group, so as to keep the
lifetimes of hard registers correct. */
if (! reload_completed)
@@ -3361,6 +3443,7 @@ init_deps (struct deps *deps)
deps->pending_flush_length = 0;
deps->last_pending_memory_flush = 0;
deps->last_function_call = 0;
+ deps->last_function_call_may_noreturn = 0;
deps->sched_before_next_call = 0;
deps->in_post_call_group_p = not_post_call;
deps->last_debug_insn = 0;
@@ -3441,7 +3524,11 @@ remove_from_deps (struct deps *deps, rtx
}
if (CALL_P (insn))
- remove_from_dependence_list (insn, &deps->last_function_call);
+ {
+ remove_from_dependence_list (insn, &deps->last_function_call);
+ remove_from_dependence_list (insn,
+ &deps->last_function_call_may_noreturn);
+ }
remove_from_dependence_list (insn, &deps->sched_before_next_call);
}
--- gcc/testsuite/gcc.c-torture/execute/pr41239.c.jj 2009-09-07 14:19:57.000000000 +0200
+++ gcc/testsuite/gcc.c-torture/execute/pr41239.c 2009-09-07 14:19:08.000000000 +0200
@@ -0,0 +1,67 @@
+/* PR rtl-optimization/41239 */
+
+struct S
+{
+ short nargs;
+ unsigned long arg[2];
+};
+
+extern void abort (void);
+extern void exit (int);
+extern char fn1 (int, const char *, int, const char *, const char *);
+extern void fn2 (int, ...);
+extern int fn3 (int);
+extern int fn4 (const char *fmt, ...) __attribute__ ((format (printf, 1, 2)));
+
+unsigned long
+test (struct S *x)
+{
+ signed int arg1 = x->arg[0];
+ long int arg2 = x->arg[1];
+
+ if (arg2 == 0)
+ (fn1 (20, "foo", 924, __func__, ((void *) 0))
+ ? (fn2 (fn3 (0x2040082), fn4 ("division by zero")))
+ : (void) 0);
+
+ return (long int) arg1 / arg2;
+}
+
+int
+main (void)
+{
+ struct S s = { 2, { 5, 0 } };
+ test (&s);
+ abort ();
+}
+
+__attribute__((noinline)) char
+fn1 (int x, const char *y, int z, const char *w, const char *v)
+{
+ asm volatile ("" : : "r" (w), "r" (v) : "memory");
+ asm volatile ("" : "+r" (x) : "r" (y), "r" (z) : "memory");
+ return x;
+}
+
+__attribute__((noinline)) int
+fn3 (int x)
+{
+ asm volatile ("" : "+r" (x) : : "memory");
+ return x;
+}
+
+__attribute__((noinline)) int
+fn4 (const char *x, ...)
+{
+ asm volatile ("" : "+r" (x) : : "memory");
+ return *x;
+}
+
+__attribute__((noinline)) void
+fn2 (int x, ...)
+{
+ asm volatile ("" : "+r" (x) : : "memory");
+ if (x)
+ /* Could be a longjmp or throw too. */
+ exit (0);
+}
Jakub