[trans-mem] perform actual memory optimizations
Aldy Hernandez
aldyh@redhat.com
Mon Oct 5 11:16:00 GMT 2009
Here we do the actual replacements to the new builtins.
Out of the corner of my eye I saw some optimizations that weren't being
done, but they weren't being done before the patch. I'll fix that next.
OK for branch?
* tree.h (BUILTIN_TM_LOAD_STORE_P): New.
(BUILTIN_TM_LOAD_P): New.
(BUILTIN_TM_STORE_P): New.
* testsuite/gcc.dg/tm/memopt-2.c: Update for new dump code.
* tree-ssa-alias.c (ref_maybe_used_by_call_p_1): Replace load
cases by a call to BUILTIN_TM_LOAD_P.
(call_may_clobber_ref_p_1): Replace store cases by a call to
BUILTIN_TM_STORE_P.
* trans-mem.c (is_tm_load): Replace load cases by a call to
BUILTIN_TM_LOAD_P.
(is_tm_store): Replace store cases by a call to
BUILTIN_TM_STORE_P.
(dump_tm_memopt_transform): Do not use prefix; just dump the new
statement.
(TRANSFORM_*): New macros.
(tm_memopt_transform_stmt): Do the actual transformation.
(tm_memopt_transform_blocks): Change calls to
dump_tm_memopt_transform into calls to tm_memopt_transform_stmt.
* calls.c (special_function_p): Change TM load/store cases into a
call to BUILTIN_TM_LOAD_STORE_P.
* gtm-builtins.def: New builtins for the following variants: WaR,
WaW, RaR, RaW, RfW.
* tree-ssa-structalias.c (find_func_aliases): Use BUILTIN_TM_*_P
macros.
Index: tree.h
===================================================================
--- tree.h (revision 152256)
+++ tree.h (working copy)
@@ -3147,6 +3147,18 @@ struct GTY(())
#define DECL_IS_TM_CLONE(NODE) \
(FUNCTION_DECL_CHECK (NODE)->function_decl.tm_clone_flag)
+/* Nonzero if a FUNCTION_CODE is a TM load/store. */
+#define BUILTIN_TM_LOAD_STORE_P(FN) \
+ ((FN) >= BUILT_IN_TM_STORE_1 && (FN) <= BUILT_IN_TM_LOAD_RFW_LDOUBLE)
+
+/* Nonzero if a FUNCTION_CODE is a TM load. */
+#define BUILTIN_TM_LOAD_P(FN) \
+ ((FN) >= BUILT_IN_TM_LOAD_1 && (FN) <= BUILT_IN_TM_LOAD_RFW_LDOUBLE)
+
+/* Nonzero if a FUNCTION_CODE is a TM store. */
+#define BUILTIN_TM_STORE_P(FN) \
+ ((FN) >= BUILT_IN_TM_STORE_1 && (FN) <= BUILT_IN_TM_STORE_WAW_LDOUBLE)
+
/* Nonzero in a FUNCTION_DECL that should be always inlined by the inliner
disregarding size and cost heuristics. This is equivalent to using
the always_inline attribute without the required diagnostics if the
Index: testsuite/gcc.dg/tm/memopt-2.c
===================================================================
--- testsuite/gcc.dg/tm/memopt-2.c (revision 152281)
+++ testsuite/gcc.dg/tm/memopt-2.c (working copy)
@@ -10,5 +10,5 @@ f()
}
}
-/* { dg-final { scan-tree-dump-times "RfW.*RU1 \\(&c\\);" 1 "tmmemopt" } } */
-/* { dg-final { scan-tree-dump-times "WaW.*WU1 \\(&c," 1 "tmmemopt" } } */
+/* { dg-final { scan-tree-dump-times "transforming.*RfWU1 \\(&c" 1 "tmmemopt" } } */
+/* { dg-final { scan-tree-dump-times "transforming.*WaWU1 \\(&c" 1 "tmmemopt" } } */
Index: tree-ssa-alias.c
===================================================================
--- tree-ssa-alias.c (revision 152256)
+++ tree-ssa-alias.c (working copy)
@@ -910,10 +910,16 @@ ref_maybe_used_by_call_p_1 (gimple call,
for the list of builtins we might need to handle here. */
if (callee != NULL_TREE
&& DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
- switch (DECL_FUNCTION_CODE (callee))
- {
- /* All the following functions clobber memory pointed to by
- their first argument. */
+ {
+ unsigned code = DECL_FUNCTION_CODE (callee);
+
+ /* TM load builtins read memory pointed to by their first argument. */
+ if (BUILTIN_TM_LOAD_P (code))
+ return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
+ else switch (code)
+ {
+ /* All the following functions clobber memory pointed to by
+ their first argument. */
case BUILT_IN_STRCPY:
case BUILT_IN_STRNCPY:
case BUILT_IN_BCOPY:
@@ -936,17 +942,7 @@ ref_maybe_used_by_call_p_1 (gimple call,
size);
return refs_may_alias_p_1 (&dref, ref, false);
}
- /* The following functions read memory pointed to by their
- first argument. */
- case BUILT_IN_TM_LOAD_1:
- case BUILT_IN_TM_LOAD_2:
- case BUILT_IN_TM_LOAD_4:
- case BUILT_IN_TM_LOAD_8:
- case BUILT_IN_TM_LOAD_FLOAT:
- case BUILT_IN_TM_LOAD_DOUBLE:
- case BUILT_IN_TM_LOAD_LDOUBLE:
- return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
- /* The following builtins do not read from memory. */
+ /* The following builtins do not read from memory. */
case BUILT_IN_FREE:
case BUILT_IN_MEMSET:
case BUILT_IN_FREXP:
@@ -971,7 +967,8 @@ ref_maybe_used_by_call_p_1 (gimple call,
default:
/* Fallthru to general call handling. */;
- }
+ }
+ }
/* Check if base is a global static variable that is not read
by the function. */
@@ -1143,10 +1140,16 @@ call_may_clobber_ref_p_1 (gimple call, a
for the list of builtins we might need to handle here. */
if (callee != NULL_TREE
&& DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
- switch (DECL_FUNCTION_CODE (callee))
- {
- /* All the following functions clobber memory pointed to by
- their first argument. */
+ {
+ unsigned code = DECL_FUNCTION_CODE (callee);
+
+ if (BUILTIN_TM_STORE_P (code))
+ goto clobber_memory;
+
+ switch (code)
+ {
+ /* All the following functions clobber memory pointed to by
+ their first argument. */
case BUILT_IN_STRCPY:
case BUILT_IN_STRNCPY:
case BUILT_IN_BCOPY:
@@ -1158,18 +1161,13 @@ call_may_clobber_ref_p_1 (gimple call, a
case BUILT_IN_STRCAT:
case BUILT_IN_STRNCAT:
case BUILT_IN_MEMSET:
- case BUILT_IN_TM_STORE_1:
- case BUILT_IN_TM_STORE_2:
- case BUILT_IN_TM_STORE_4:
- case BUILT_IN_TM_STORE_8:
- case BUILT_IN_TM_STORE_FLOAT:
- case BUILT_IN_TM_STORE_DOUBLE:
- case BUILT_IN_TM_STORE_LDOUBLE:
case BUILT_IN_TM_MEMCPY:
case BUILT_IN_TM_MEMMOVE:
{
ao_ref dref;
- tree size = NULL_TREE;
+ tree size;
+ clobber_memory:
+ size = NULL_TREE;
if (gimple_call_num_args (call) == 3)
size = gimple_call_arg (call, 2);
ao_ref_init_from_ptr_and_size (&dref,
@@ -1177,9 +1175,9 @@ call_may_clobber_ref_p_1 (gimple call, a
size);
return refs_may_alias_p_1 (&dref, ref, false);
}
- /* Freeing memory kills the pointed-to memory. More importantly
- the call has to serve as a barrier for moving loads and stores
- across it. */
+ /* Freeing memory kills the pointed-to memory. More importantly
+ the call has to serve as a barrier for moving loads and stores
+ across it. */
case BUILT_IN_FREE:
{
tree ptr = gimple_call_arg (call, 0);
@@ -1231,7 +1229,8 @@ call_may_clobber_ref_p_1 (gimple call, a
}
default:
/* Fallthru to general call handling. */;
- }
+ }
+ }
/* Check if base is a global static variable that is not written
by the function. */
Index: trans-mem.c
===================================================================
--- trans-mem.c (revision 152281)
+++ trans-mem.c (working copy)
@@ -294,22 +294,8 @@ is_tm_load (gimple stmt)
return false;
fndecl = gimple_call_fndecl (stmt);
- if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_TM_LOAD_1:
- case BUILT_IN_TM_LOAD_2:
- case BUILT_IN_TM_LOAD_4:
- case BUILT_IN_TM_LOAD_8:
- case BUILT_IN_TM_LOAD_FLOAT:
- case BUILT_IN_TM_LOAD_DOUBLE:
- case BUILT_IN_TM_LOAD_LDOUBLE:
- return true;
- default:
- break;
- }
-
- return false;
+ return (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+ && BUILTIN_TM_LOAD_P (DECL_FUNCTION_CODE (fndecl)));
}
/* Return true if STMT is a TM store. */
@@ -323,22 +309,8 @@ is_tm_store (gimple stmt)
return false;
fndecl = gimple_call_fndecl (stmt);
- if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_TM_STORE_1:
- case BUILT_IN_TM_STORE_2:
- case BUILT_IN_TM_STORE_4:
- case BUILT_IN_TM_STORE_8:
- case BUILT_IN_TM_STORE_FLOAT:
- case BUILT_IN_TM_STORE_DOUBLE:
- case BUILT_IN_TM_STORE_LDOUBLE:
- return true;
- default:
- break;
- }
-
- return false;
+ return (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+ && BUILTIN_TM_STORE_P (DECL_FUNCTION_CODE (fndecl)));
}
/* Return true if FNDECL is BUILT_IN_TM_ABORT. */
@@ -1733,21 +1705,6 @@ tm_memopt_accumulate_memops (basic_block
}
}
-/* Inform about an upcoming load/store optimization. STMT is the
- statement about to be transformed. PREFIX is the type of
- optimization to be done. */
-
-static void
-dump_tm_memopt_transform (const char *prefix, gimple stmt)
-{
- if (dump_file)
- {
- fprintf (dump_file, "TM memopt: transforming into %s: ", prefix);
- print_gimple_stmt (dump_file, stmt, 0, 0);
- fprintf (dump_file, "\n");
- }
-}
-
/* Prettily dump one of the memopt sets. BITS is the bitmap to dump. */
static void
@@ -2015,6 +1972,47 @@ tm_memopt_compute_antic (struct tm_regio
dump_tm_memopt_sets (blocks);
}
+/* Offsets of load variants from TM_LOAD. For example,
+ BUILT_IN_TM_LOAD_RAR* is an offset of 1 from BUILT_IN_TM_LOAD*.
+ See gtm-builtins.def. */
+#define TRANSFORM_RAR 1
+#define TRANSFORM_RAW 2
+#define TRANSFORM_RFW 3
+/* Offsets of store variants from TM_STORE. */
+#define TRANSFORM_WAR 1
+#define TRANSFORM_WAW 2
+
+/* Inform about a load/store optimization. */
+
+static void
+dump_tm_memopt_transform (gimple stmt)
+{
+ if (dump_file)
+ {
+ fprintf (dump_file, "TM memopt: transforming: ");
+ print_gimple_stmt (dump_file, stmt, 0, 0);
+ fprintf (dump_file, "\n");
+ }
+}
+
+/* Perform a read/write optimization. Replaces the TM builtin in STMT
+ by a builtin that is OFFSET entries down in the builtins table in
+ gtm-builtins.def. */
+
+static void
+tm_memopt_transform_stmt (unsigned int offset,
+ gimple stmt,
+ gimple_stmt_iterator *gsi)
+{
+ tree fn = gimple_call_fn (stmt);
+ gcc_assert (TREE_CODE (fn) == ADDR_EXPR);
+ TREE_OPERAND (fn, 0)
+ = built_in_decls[DECL_FUNCTION_CODE (TREE_OPERAND (fn, 0)) + offset];
+ gimple_call_set_fn (stmt, fn);
+ gsi_replace (gsi, stmt, true);
+ dump_tm_memopt_transform (stmt);
+}
+
/* Perform the actual TM memory optimization transformations in the
basic blocks in BLOCKS. */
@@ -2035,40 +2033,34 @@ tm_memopt_transform_blocks (VEC (basic_b
bitmap store_antic = STORE_ANTIC_OUT (bb);
unsigned int loc;
+ /* FIXME: Make sure we're not transforming something like a
+ user-coded read-after-write, etc. Check for simple
+ loads, not the optimized variants. Similarly for
+ is_tm_store below. */
if (is_tm_load (stmt))
{
loc = tm_memopt_value_number (stmt, NO_INSERT);
if (store_avail && bitmap_bit_p (store_avail, loc))
- {
- dump_tm_memopt_transform ("RaW", stmt);
- }
+ tm_memopt_transform_stmt (TRANSFORM_RAW, stmt, &gsi);
else if (store_antic && bitmap_bit_p (store_antic, loc))
{
- dump_tm_memopt_transform ("RfW", stmt);
+ tm_memopt_transform_stmt (TRANSFORM_RFW, stmt, &gsi);
bitmap_set_bit (store_avail, loc);
}
else if (read_avail && bitmap_bit_p (read_avail, loc))
- {
- dump_tm_memopt_transform ("RaR", stmt);
- }
+ tm_memopt_transform_stmt (TRANSFORM_RAR, stmt, &gsi);
else
- {
- bitmap_set_bit (read_avail, loc);
- }
+ bitmap_set_bit (read_avail, loc);
}
else if (is_tm_store (stmt))
{
loc = tm_memopt_value_number (stmt, NO_INSERT);
if (store_avail && bitmap_bit_p (store_avail, loc))
- {
- dump_tm_memopt_transform ("WaW", stmt);
- }
+ tm_memopt_transform_stmt (TRANSFORM_WAW, stmt, &gsi);
else
{
if (read_avail && bitmap_bit_p (read_avail, loc))
- {
- dump_tm_memopt_transform ("WaR", stmt);
- }
+ tm_memopt_transform_stmt (TRANSFORM_WAR, stmt, &gsi);
bitmap_set_bit (store_avail, loc);
}
}
Index: calls.c
===================================================================
--- calls.c (revision 152256)
+++ calls.c (working copy)
@@ -470,34 +470,25 @@ special_function_p (const_tree fndecl, i
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
{
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- case BUILT_IN_TM_COMMIT:
- case BUILT_IN_TM_COMMIT_EH:
- case BUILT_IN_TM_ABORT:
- case BUILT_IN_TM_IRREVOCABLE:
- case BUILT_IN_TM_GETTMCLONE_IRR:
- case BUILT_IN_TM_MEMCPY:
- case BUILT_IN_TM_MEMMOVE:
- case BUILT_IN_TM_STORE_1:
- case BUILT_IN_TM_STORE_2:
- case BUILT_IN_TM_STORE_4:
- case BUILT_IN_TM_STORE_8:
- case BUILT_IN_TM_STORE_FLOAT:
- case BUILT_IN_TM_STORE_DOUBLE:
- case BUILT_IN_TM_STORE_LDOUBLE:
- case BUILT_IN_TM_LOAD_1:
- case BUILT_IN_TM_LOAD_2:
- case BUILT_IN_TM_LOAD_4:
- case BUILT_IN_TM_LOAD_8:
- case BUILT_IN_TM_LOAD_FLOAT:
- case BUILT_IN_TM_LOAD_DOUBLE:
- case BUILT_IN_TM_LOAD_LDOUBLE:
- flags |= ECF_TM_OPS;
- break;
- default:
- break;
- }
+ unsigned code = DECL_FUNCTION_CODE (fndecl);
+
+ if (BUILTIN_TM_LOAD_STORE_P (code))
+ flags |= ECF_TM_OPS;
+ else
+ switch (code)
+ {
+ case BUILT_IN_TM_COMMIT:
+ case BUILT_IN_TM_COMMIT_EH:
+ case BUILT_IN_TM_ABORT:
+ case BUILT_IN_TM_IRREVOCABLE:
+ case BUILT_IN_TM_GETTMCLONE_IRR:
+ case BUILT_IN_TM_MEMCPY:
+ case BUILT_IN_TM_MEMMOVE:
+ flags |= ECF_TM_OPS;
+ break;
+ default:
+ break;
+ }
}
if (DECL_NAME (fndecl)
Index: gtm-builtins.def
===================================================================
--- gtm-builtins.def (revision 152256)
+++ gtm-builtins.def (working copy)
@@ -20,32 +20,131 @@ DEF_TM_BUILTIN (BUILT_IN_TM_GETTMCLONE_I
DEF_TM_BUILTIN (BUILT_IN_TM_GETTMCLONE_SAFE, "_ITM_getTMCloneSafe",
BT_FN_PTR_PTR, ATTR_TM_CONST_NOTHROW_LIST)
+/* Writes.
+
+ Note: The writes must follow the following order: STORE, WAR, WAW.
+ The TM optimizations depend on this order.
+
+ BUILT_IN_TM_STORE_1 must be the first builtin.
+ BUILTIN_TM_LOAD_STORE_P depends on this. */
DEF_TM_BUILTIN (BUILT_IN_TM_STORE_1, "_ITM_WU1",
BT_FN_VOID_VPTR_I1, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAR_1, "_ITM_WaRU1",
+ BT_FN_VOID_VPTR_I1, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAW_1, "_ITM_WaWU1",
+ BT_FN_VOID_VPTR_I1, ATTR_TM_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_STORE_2, "_ITM_WU2",
BT_FN_VOID_VPTR_I2, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAR_2, "_ITM_WaRU2",
+ BT_FN_VOID_VPTR_I2, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAW_2, "_ITM_WaWU2",
+ BT_FN_VOID_VPTR_I2, ATTR_TM_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_STORE_4, "_ITM_WU4",
BT_FN_VOID_VPTR_I4, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAR_4, "_ITM_WaRU4",
+ BT_FN_VOID_VPTR_I4, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAW_4, "_ITM_WaWU4",
+ BT_FN_VOID_VPTR_I4, ATTR_TM_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_STORE_8, "_ITM_WU8",
BT_FN_VOID_VPTR_I8, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAR_8, "_ITM_WaRU8",
+ BT_FN_VOID_VPTR_I8, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAW_8, "_ITM_WaWU8",
+ BT_FN_VOID_VPTR_I8, ATTR_TM_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_STORE_FLOAT, "_ITM_WF",
BT_FN_VOID_VPTR_FLOAT, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAR_FLOAT, "_ITM_WaRF",
+ BT_FN_VOID_VPTR_FLOAT, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAW_FLOAT, "_ITM_WaWF",
+ BT_FN_VOID_VPTR_FLOAT, ATTR_TM_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_STORE_DOUBLE, "_ITM_WD",
BT_FN_VOID_VPTR_DOUBLE, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAR_DOUBLE, "_ITM_WaRD",
+ BT_FN_VOID_VPTR_DOUBLE, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAW_DOUBLE, "_ITM_WaWD",
+ BT_FN_VOID_VPTR_DOUBLE, ATTR_TM_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_STORE_LDOUBLE, "_ITM_WE",
BT_FN_VOID_VPTR_LDOUBLE, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAR_LDOUBLE, "_ITM_WaRE",
+ BT_FN_VOID_VPTR_LDOUBLE, ATTR_TM_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_STORE_WAW_LDOUBLE, "_ITM_WaWE",
+ BT_FN_VOID_VPTR_LDOUBLE, ATTR_TM_NOTHROW_LIST)
+/* Note: BUILT_IN_TM_STORE_WAW_LDOUBLE must be the last TM store.
+ BUILTIN_TM_STORE_P depends on this. */
+
+/* Reads.
+ Note: The reads must follow the following order: LOAD, RAR, RAW, RFW.
+ The TM optimizations depend on this order. */
DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_1, "_ITM_RU1",
BT_FN_I1_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAR_1, "_ITM_RaRU1",
+ BT_FN_I1_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAW_1, "_ITM_RaWU1",
+ BT_FN_I1_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RFW_1, "_ITM_RfWU1",
+ BT_FN_I1_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_2, "_ITM_RU2",
BT_FN_I2_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAR_2, "_ITM_RaRU2",
+ BT_FN_I2_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAW_2, "_ITM_RaWU2",
+ BT_FN_I2_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RFW_2, "_ITM_RfWU2",
+ BT_FN_I2_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_4, "_ITM_RU4",
BT_FN_I4_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAR_4, "_ITM_RaRU4",
+ BT_FN_I4_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAW_4, "_ITM_RaWU4",
+ BT_FN_I4_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RFW_4, "_ITM_RfWU4",
+ BT_FN_I4_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_8, "_ITM_RU8",
BT_FN_I8_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAR_8, "_ITM_RaRU8",
+ BT_FN_I8_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAW_8, "_ITM_RaWU8",
+ BT_FN_I8_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RFW_8, "_ITM_RfWU8",
+ BT_FN_I8_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_FLOAT, "_ITM_RF",
BT_FN_FLOAT_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAR_FLOAT, "_ITM_RaRF",
+ BT_FN_FLOAT_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAW_FLOAT, "_ITM_RaWF",
+ BT_FN_FLOAT_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RFW_FLOAT, "_ITM_RfWF",
+ BT_FN_FLOAT_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_DOUBLE, "_ITM_RD",
BT_FN_DOUBLE_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAR_DOUBLE, "_ITM_RaRD",
+ BT_FN_DOUBLE_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAW_DOUBLE, "_ITM_RaWD",
+ BT_FN_DOUBLE_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RFW_DOUBLE, "_ITM_RfWD",
+ BT_FN_DOUBLE_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+
DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_LDOUBLE, "_ITM_RE",
BT_FN_LDOUBLE_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAR_LDOUBLE, "_ITM_RaRE",
+ BT_FN_LDOUBLE_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RAW_LDOUBLE, "_ITM_RaWE",
+ BT_FN_LDOUBLE_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+DEF_TM_BUILTIN (BUILT_IN_TM_LOAD_RFW_LDOUBLE, "_ITM_RfWE",
+ BT_FN_LDOUBLE_VPTR, ATTR_TM_PURE_TMPURE_NOTHROW_LIST)
+
+/* Note: BUILT_IN_TM_LOAD_RFW_LDOUBLE must be the last TM load as well
+ as the last builtin. BUILTIN_TM_LOAD_STORE_P and BUILTIN_TM_LOAD_P
+ depend on this. */
Index: tree-ssa-structalias.c
===================================================================
--- tree-ssa-structalias.c (revision 152256)
+++ tree-ssa-structalias.c (working copy)
@@ -3670,94 +3670,13 @@ find_func_aliases (gimple t)
tree fndecl;
if ((fndecl = gimple_call_fndecl (t)) != NULL_TREE
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
- /* ??? All builtins that are handled here need to be handled
- in the alias-oracle query functions explicitly! */
- switch (DECL_FUNCTION_CODE (fndecl))
- {
- /* All the following functions return a pointer to the same object
- as their first argument points to. The functions do not add
- to the ESCAPED solution. The functions make the first argument
- pointed to memory point to what the second argument pointed to
- memory points to. */
- case BUILT_IN_STRCPY:
- case BUILT_IN_STRNCPY:
- case BUILT_IN_BCOPY:
- case BUILT_IN_MEMCPY:
- case BUILT_IN_MEMMOVE:
- case BUILT_IN_MEMPCPY:
- case BUILT_IN_STPCPY:
- case BUILT_IN_STPNCPY:
- case BUILT_IN_STRCAT:
- case BUILT_IN_STRNCAT:
- case BUILT_IN_TM_MEMCPY:
- case BUILT_IN_TM_MEMMOVE:
- {
- tree res = gimple_call_lhs (t);
- tree dest = gimple_call_arg (t, 0);
- tree src = gimple_call_arg (t, 1);
- if (res != NULL_TREE)
- {
- get_constraint_for (res, &lhsc);
- if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
- || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
- || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
- get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
- else
- get_constraint_for (dest, &rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
- }
- get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
- get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
- do_deref (&lhsc);
- do_deref (&rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
- return;
- }
- case BUILT_IN_MEMSET:
- {
- tree res = gimple_call_lhs (t);
- tree dest = gimple_call_arg (t, 0);
- unsigned i;
- ce_s *lhsp;
- struct constraint_expr ac;
- if (res != NULL_TREE)
- {
- get_constraint_for (res, &lhsc);
- get_constraint_for (dest, &rhsc);
- process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
- }
- get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
- do_deref (&lhsc);
- if (flag_delete_null_pointer_checks
- && integer_zerop (gimple_call_arg (t, 1)))
- {
- ac.type = ADDRESSOF;
- ac.var = nothing_id;
- }
- else
- {
- ac.type = SCALAR;
- ac.var = integer_id;
- }
- ac.offset = 0;
- for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
- process_constraint (new_constraint (*lhsp, ac));
- VEC_free (ce_s, heap, lhsc);
- return;
- }
- case BUILT_IN_TM_STORE_1:
- case BUILT_IN_TM_STORE_2:
- case BUILT_IN_TM_STORE_4:
- case BUILT_IN_TM_STORE_8:
- case BUILT_IN_TM_STORE_FLOAT:
- case BUILT_IN_TM_STORE_DOUBLE:
- case BUILT_IN_TM_STORE_LDOUBLE:
+ {
+ unsigned code = DECL_FUNCTION_CODE (fndecl);
+
+ /* ??? All builtins that are handled here need to be handled
+ in the alias-oracle query functions explicitly! */
+
+ if (BUILTIN_TM_STORE_P (code))
{
tree addr = gimple_call_arg (t, 0);
tree src = gimple_call_arg (t, 1);
@@ -3770,13 +3689,7 @@ find_func_aliases (gimple t)
VEC_free (ce_s, heap, rhsc);
return;
}
- case BUILT_IN_TM_LOAD_1:
- case BUILT_IN_TM_LOAD_2:
- case BUILT_IN_TM_LOAD_4:
- case BUILT_IN_TM_LOAD_8:
- case BUILT_IN_TM_LOAD_FLOAT:
- case BUILT_IN_TM_LOAD_DOUBLE:
- case BUILT_IN_TM_LOAD_LDOUBLE:
+ if (BUILTIN_TM_LOAD_P (code))
{
tree dest = gimple_call_lhs (t);
tree addr = gimple_call_arg (t, 0);
@@ -3789,35 +3702,115 @@ find_func_aliases (gimple t)
VEC_free (ce_s, heap, rhsc);
return;
}
- /* All the following functions do not return pointers, do not
- modify the points-to sets of memory reachable from their
- arguments and do not add to the ESCAPED solution. */
- case BUILT_IN_SINCOS:
- case BUILT_IN_SINCOSF:
- case BUILT_IN_SINCOSL:
- case BUILT_IN_FREXP:
- case BUILT_IN_FREXPF:
- case BUILT_IN_FREXPL:
- case BUILT_IN_GAMMA_R:
- case BUILT_IN_GAMMAF_R:
- case BUILT_IN_GAMMAL_R:
- case BUILT_IN_LGAMMA_R:
- case BUILT_IN_LGAMMAF_R:
- case BUILT_IN_LGAMMAL_R:
- case BUILT_IN_MODF:
- case BUILT_IN_MODFF:
- case BUILT_IN_MODFL:
- case BUILT_IN_REMQUO:
- case BUILT_IN_REMQUOF:
- case BUILT_IN_REMQUOL:
- case BUILT_IN_FREE:
- return;
- /* printf-style functions may have hooks to set pointers to
- point to somewhere into the generated string. Leave them
- for a later excercise... */
- default:
- /* Fallthru to general call handling. */;
- }
+ switch (code)
+ {
+ /* All the following functions return a pointer to the same object
+ as their first argument points to. The functions do not add
+ to the ESCAPED solution. The functions make the first argument
+ pointed to memory point to what the second argument pointed to
+ memory points to. */
+ case BUILT_IN_STRCPY:
+ case BUILT_IN_STRNCPY:
+ case BUILT_IN_BCOPY:
+ case BUILT_IN_MEMCPY:
+ case BUILT_IN_MEMMOVE:
+ case BUILT_IN_MEMPCPY:
+ case BUILT_IN_STPCPY:
+ case BUILT_IN_STPNCPY:
+ case BUILT_IN_STRCAT:
+ case BUILT_IN_STRNCAT:
+ case BUILT_IN_TM_MEMCPY:
+ case BUILT_IN_TM_MEMMOVE:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, 0);
+ tree src = gimple_call_arg (t, 1);
+ if (res != NULL_TREE)
+ {
+ get_constraint_for (res, &lhsc);
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY)
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
+ else
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
+ do_deref (&lhsc);
+ do_deref (&rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ return;
+ }
+ case BUILT_IN_MEMSET:
+ {
+ tree res = gimple_call_lhs (t);
+ tree dest = gimple_call_arg (t, 0);
+ unsigned i;
+ ce_s *lhsp;
+ struct constraint_expr ac;
+ if (res != NULL_TREE)
+ {
+ get_constraint_for (res, &lhsc);
+ get_constraint_for (dest, &rhsc);
+ process_all_all_constraints (lhsc, rhsc);
+ VEC_free (ce_s, heap, lhsc);
+ VEC_free (ce_s, heap, rhsc);
+ }
+ get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
+ do_deref (&lhsc);
+ if (flag_delete_null_pointer_checks
+ && integer_zerop (gimple_call_arg (t, 1)))
+ {
+ ac.type = ADDRESSOF;
+ ac.var = nothing_id;
+ }
+ else
+ {
+ ac.type = SCALAR;
+ ac.var = integer_id;
+ }
+ ac.offset = 0;
+ for (i = 0; VEC_iterate (ce_s, lhsc, i, lhsp); ++i)
+ process_constraint (new_constraint (*lhsp, ac));
+ VEC_free (ce_s, heap, lhsc);
+ return;
+ }
+ /* All the following functions do not return pointers, do not
+ modify the points-to sets of memory reachable from their
+ arguments and do not add to the ESCAPED solution. */
+ case BUILT_IN_SINCOS:
+ case BUILT_IN_SINCOSF:
+ case BUILT_IN_SINCOSL:
+ case BUILT_IN_FREXP:
+ case BUILT_IN_FREXPF:
+ case BUILT_IN_FREXPL:
+ case BUILT_IN_GAMMA_R:
+ case BUILT_IN_GAMMAF_R:
+ case BUILT_IN_GAMMAL_R:
+ case BUILT_IN_LGAMMA_R:
+ case BUILT_IN_LGAMMAF_R:
+ case BUILT_IN_LGAMMAL_R:
+ case BUILT_IN_MODF:
+ case BUILT_IN_MODFF:
+ case BUILT_IN_MODFL:
+ case BUILT_IN_REMQUO:
+ case BUILT_IN_REMQUOF:
+ case BUILT_IN_REMQUOL:
+ case BUILT_IN_FREE:
+ return;
+ /* printf-style functions may have hooks to set pointers to
+ point to somewhere into the generated string. Leave them
+ for a later exercise... */
+ default:
+ /* Fallthru to general call handling. */;
+ }
+ }
if (!in_ipa_mode)
{
VEC(ce_s, heap) *rhsc = NULL;
More information about the Gcc-patches
mailing list