This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
[PATCH] Use REG_EQUIV/REG_EQUAL notes for loads from MEM_READONLY_P memory in var-tracking (PR debug/43299)
- From: Jakub Jelinek <jakub at redhat dot com>
- To: Richard Guenther <rguenther at suse dot de>, Alexandre Oliva <aoliva at redhat dot com>
- Cc: gcc-patches at gcc dot gnu dot org
- Date: Tue, 9 Mar 2010 16:11:22 +0100
- Subject: [PATCH] Use REG_EQUIV/REG_EQUAL notes for loads from MEM_READONLY_P memory in var-tracking (PR debug/43299)
- Reply-to: Jakub Jelinek <jakub at redhat dot com>
Hi!
As discussed in this PR, this is something I very much doubt is ever
solvable using delegitimize_address, because delegitimizing the address
only afterwards is too late: although (mem (plus (r2) (const (unspec XXX))))
can be delegitimized to e.g. (symbol_ref YYY) at the point where it
is defined, if r2 is subsequently modified, we end up with a memory
reference whose address can no longer be delegitimized. This patch does
something similar to what e.g. DSE does when using cselib: single sets
from mem/u are, when there is a REG_EQUIV/REG_EQUAL note with a constant
argument, handled as if they were a direct set of the destination to that
constant rather than a memory load.
Bootstrapped/regtested on x86_64-linux and i686-linux, powerpc64-linux
--with-cpu=default64 bootstrap/regtest pending. Ok for trunk?
2010-03-09 Jakub Jelinek <jakub@redhat.com>
PR debug/43299
* var-tracking.c (adjust_sets): New function.
(count_with_sets, add_with_sets): Use it.
(add_stores): For read-only MEM srcs prefer alternative
constants if adjust_sets changed something.
* gcc.dg/pr43299.c: New test.
--- gcc/var-tracking.c.jj 2010-03-08 18:57:08.000000000 +0100
+++ gcc/var-tracking.c 2010-03-09 12:25:04.000000000 +0100
@@ -4601,6 +4601,30 @@ count_stores (rtx loc, const_rtx expr AT
count_uses (&loc, cui);
}
+/* Adjust sets if needed. Currently this optimizes read-only MEM loads
+ if REG_EQUAL/REG_EQUIV note is present. */
+
+static void
+adjust_sets (rtx insn, struct cselib_set *sets, int n_sets)
+{
+ if (n_sets == 1 && MEM_P (sets[0].src) && MEM_READONLY_P (sets[0].src))
+ {
+ /* For read-only MEMs containing some constant, prefer those
+ constants. */
+ rtx note = find_reg_equal_equiv_note (insn), src;
+
+ if (note && CONSTANT_P (XEXP (note, 0)))
+ {
+ sets[0].src = src = XEXP (note, 0);
+ if (GET_CODE (PATTERN (insn)) == COND_EXEC)
+ src = gen_rtx_IF_THEN_ELSE (GET_MODE (sets[0].dest),
+ COND_EXEC_TEST (PATTERN (insn)),
+ src, sets[0].dest);
+ sets[0].src_elt = cselib_lookup (src, GET_MODE (sets[0].dest), 1);
+ }
+ }
+}
+
/* Callback for cselib_record_sets_hook, that counts how many micro
operations it takes for uses and stores in an insn after
cselib_record_sets has analyzed the sets in an insn, but before it
@@ -4617,6 +4641,8 @@ count_with_sets (rtx insn, struct cselib
cselib_hook_called = true;
+ adjust_sets (insn, sets, n_sets);
+
cui.insn = insn;
cui.bb = bb;
cui.sets = sets;
@@ -4971,7 +4997,15 @@ add_stores (rtx loc, const_rtx expr, voi
else
{
if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
- src = var_lowpart (mode2, SET_SRC (expr));
+ {
+ if (cui->n_sets == 1
+ && MEM_P (SET_SRC (expr))
+ && MEM_READONLY_P (SET_SRC (expr))
+ && CONSTANT_P (cui->sets[0].src))
+ src = var_lowpart (mode2, cui->sets[0].src);
+ else
+ src = var_lowpart (mode2, SET_SRC (expr));
+ }
loc = var_lowpart (mode2, loc);
if (src == NULL)
@@ -5030,7 +5064,15 @@ add_stores (rtx loc, const_rtx expr, voi
else
{
if (GET_CODE (expr) == SET && SET_DEST (expr) == loc)
- src = var_lowpart (mode2, SET_SRC (expr));
+ {
+ if (cui->n_sets == 1
+ && MEM_P (SET_SRC (expr))
+ && MEM_READONLY_P (SET_SRC (expr))
+ && CONSTANT_P (cui->sets[0].src))
+ src = var_lowpart (mode2, cui->sets[0].src);
+ else
+ src = var_lowpart (mode2, SET_SRC (expr));
+ }
loc = var_lowpart (mode2, loc);
if (src == NULL)
@@ -5099,12 +5141,18 @@ add_stores (rtx loc, const_rtx expr, voi
}
else if (resolve && GET_CODE (mo->u.loc) == SET)
{
- nloc = replace_expr_with_values (SET_SRC (expr));
+ src = SET_SRC (expr);
+ if (cui->n_sets == 1
+ && MEM_P (src)
+ && MEM_READONLY_P (src)
+ && CONSTANT_P (cui->sets[0].src))
+ src = cui->sets[0].src;
+ nloc = replace_expr_with_values (src);
/* Avoid the mode mismatch between oexpr and expr. */
if (!nloc && mode != mode2)
{
- nloc = SET_SRC (expr);
+ nloc = src;
gcc_assert (oloc == SET_DEST (expr));
}
@@ -5201,6 +5249,8 @@ add_with_sets (rtx insn, struct cselib_s
cselib_hook_called = true;
+ adjust_sets (insn, sets, n_sets);
+
cui.insn = insn;
cui.bb = bb;
cui.sets = sets;
--- gcc/testsuite/gcc.dg/pr43299.c.jj 2010-03-09 12:30:57.000000000 +0100
+++ gcc/testsuite/gcc.dg/pr43299.c 2010-03-09 12:31:18.000000000 +0100
@@ -0,0 +1,28 @@
+/* PR debug/43299 */
+/* { dg-do assemble } */
+/* { dg-options "-g -O2" } */
+
+extern void *emit_insn (void *);
+
+__attribute__((noinline))
+void *gen_load_locked_si (void *x, void *y)
+{
+ return x;
+}
+
+__attribute__((noinline))
+void *gen_load_locked_di (void *x, void *y)
+{
+ return x;
+}
+
+void
+emit_load_locked (int mode, void *reg, void *mem)
+{
+ void * (*fn) (void *, void *) = ((void *)0);
+ if (mode == 9)
+ fn = gen_load_locked_si;
+ else if (mode == 10)
+ fn = gen_load_locked_di;
+ emit_insn (fn (reg, mem));
+}
Jakub