This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
struct reorg branch updated from tree profiling branch
- From: Kenneth Zadeck <zadeck at naturalbridge dot com>
- To: gcc-patches at gcc dot gnu dot org, "Tice, Caroline" <ctice at apple dot com>,MUSTAFA at il dot ibm dot com
- Date: Mon, 24 Jan 2005 15:35:52 -0500
- Subject: struct reorg branch updated from tree profiling branch
2005-01-24 Kenneth Zadeck <zadeck@naturalbridge.com>
Merged with tree-profiling branch, as of 2005-01-22. Tag on
the tree-profiling branch is "tree-profiling-merge-20050122".
diff -r -up -x CVS ../gccSBaseline/gcc/alias.c gcc/alias.c
--- ../gccSBaseline/gcc/alias.c 2005-01-21 22:01:08.000000000 -0500
+++ gcc/alias.c 2005-01-22 15:58:08.000000000 -0500
@@ -45,6 +45,54 @@ Software Foundation, 59 Temple Place - S
#include "cgraph.h"
#include "varray.h"
+/* The aliasing API provided here solves related but different problems:
+
+ Say there exists (in c)
+
+ struct X {
+ struct Y y1;
+ struct Z z2;
+ } x1, *px1, *px2;
+
+ struct Y y2, *py;
+ struct Z z2, *pz;
+
+
+ py = &px1.y1;
+ px2 = &x1;
+
+ Consider the four questions:
+
+ Can a store to x1 interfere with px2->y2?
+ Can a store to x1 interfere with px2->z2?
+ (*px2).z2
+ Can a store to x1 change the value pointed to by py?
+ Can a store to x1 change the value pointed to by pz?
+
+ The answer to these questions can be yes, yes, yes, and no.
+
+ The first two questions can be answered with a simple examination
+ of the type system. If structure X contains a field of type Y then
+ a store thru a pointer to an X can overwrite any field that is
+ contained (recursively) in an X (unless we know that px1 != px2).
+
+ The last two of the questions can be solved in the same way as the
+ first two questions but this is too conservative. The observation
+ is that if the address of a field is not explicitly taken and the
+ type is completely local to the compilation unit, then it is
+ impossible that a pointer to an instance of the field type overlaps
+ with an enclosing structure.
+
+ Historically in GCC, these two problems were combined and a single
+ data structure was used to represent the solution to these
+ problems. We now have two similar but different data structures,
+ The data structure to solve the last two questions is similar to the
+ first, but does not contain the fields in it whose addresses are
+ never taken. For types that do escape the compilation unit, the
+ data structures will have identical information.
+
+*/
+
/* The alias sets assigned to MEMs assist the back-end in determining
which MEMs can alias which other MEMs. In general, two MEMs in
different alias sets cannot alias each other, with one important
@@ -2006,22 +2054,34 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
/* Unless both have exprs, we can't tell anything. */
if (exprx == 0 || expry == 0)
return 0;
-
+
/* If both are field references, we may be able to determine something. */
if (TREE_CODE (exprx) == COMPONENT_REF
&& TREE_CODE (expry) == COMPONENT_REF
&& nonoverlapping_component_refs_p (exprx, expry))
return 1;
+
/* If the field reference test failed, look at the DECLs involved. */
moffsetx = MEM_OFFSET (x);
if (TREE_CODE (exprx) == COMPONENT_REF)
{
- tree t = decl_for_component_ref (exprx);
- if (! t)
- return 0;
- moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
- exprx = t;
+ if (TREE_CODE (expry) == VAR_DECL
+ && POINTER_TYPE_P (TREE_TYPE (expry)))
+ {
+ tree field = TREE_OPERAND (exprx, 1);
+ tree fieldcontext = DECL_FIELD_CONTEXT (field);
+ if (ipa_static_address_not_taken_of_field (fieldcontext,
+ TREE_TYPE (field)))
+ return 1;
+ }
+ {
+ tree t = decl_for_component_ref (exprx);
+ if (! t)
+ return 0;
+ moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
+ exprx = t;
+ }
}
else if (INDIRECT_REF_P (exprx))
{
@@ -2034,11 +2094,22 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
moffsety = MEM_OFFSET (y);
if (TREE_CODE (expry) == COMPONENT_REF)
{
- tree t = decl_for_component_ref (expry);
- if (! t)
- return 0;
- moffsety = adjust_offset_for_component_ref (expry, moffsety);
- expry = t;
+ if (TREE_CODE (exprx) == VAR_DECL
+ && POINTER_TYPE_P (TREE_TYPE (exprx)))
+ {
+ tree field = TREE_OPERAND (expry, 1);
+ tree fieldcontext = DECL_FIELD_CONTEXT (field);
+ if (ipa_static_address_not_taken_of_field (fieldcontext,
+ TREE_TYPE (field)))
+ return 1;
+ }
+ {
+ tree t = decl_for_component_ref (expry);
+ if (! t)
+ return 0;
+ moffsety = adjust_offset_for_component_ref (expry, moffsety);
+ expry = t;
+ }
}
else if (INDIRECT_REF_P (expry))
{
diff -r -up -x CVS ../gccSBaseline/gcc/builtin-attrs.def gcc/builtin-attrs.def
--- ../gccSBaseline/gcc/builtin-attrs.def 2004-11-23 20:07:45.000000000 -0500
+++ gcc/builtin-attrs.def 2005-01-22 15:58:10.000000000 -0500
@@ -74,6 +74,7 @@ DEF_ATTR_IDENT (ATTR_CONST, "const")
DEF_ATTR_IDENT (ATTR_FORMAT, "format")
DEF_ATTR_IDENT (ATTR_FORMAT_ARG, "format_arg")
DEF_ATTR_IDENT (ATTR_MALLOC, "malloc")
+DEF_ATTR_IDENT (ATTR_POINTER_NO_ESCAPE, "pointer_no_escape")
DEF_ATTR_IDENT (ATTR_NONNULL, "nonnull")
DEF_ATTR_IDENT (ATTR_NORETURN, "noreturn")
DEF_ATTR_IDENT (ATTR_NOTHROW, "nothrow")
@@ -88,6 +89,9 @@ DEF_ATTR_IDENT (ATTR_SENTINEL, "sentinel
DEF_ATTR_IDENT (ATTR_STRFMON, "strfmon")
DEF_ATTR_IDENT (ATTR_STRFTIME, "strftime")
+
+DEF_ATTR_TREE_LIST (ATTR_POINTER_NO_ESCAPE_LIST, ATTR_POINTER_NO_ESCAPE, \
+ ATTR_NULL, ATTR_NULL)
DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, ATTR_NULL)
DEF_ATTR_TREE_LIST (ATTR_CONST_NOTHROW_LIST, ATTR_CONST, \
diff -r -up -x CVS ../gccSBaseline/gcc/builtins.def gcc/builtins.def
--- ../gccSBaseline/gcc/builtins.def 2004-11-23 20:07:45.000000000 -0500
+++ gcc/builtins.def 2005-01-22 15:58:12.000000000 -0500
@@ -577,6 +577,7 @@ DEF_GCC_BUILTIN (BUILT_IN_INIT_DW
DEF_EXT_LIB_BUILTIN (BUILT_IN_FINITE, "finite", BT_FN_INT_DOUBLE, ATTR_CONST_NOTHROW_LIST)
DEF_EXT_LIB_BUILTIN (BUILT_IN_FINITEF, "finitef", BT_FN_INT_FLOAT, ATTR_CONST_NOTHROW_LIST)
DEF_EXT_LIB_BUILTIN (BUILT_IN_FINITEL, "finitel", BT_FN_INT_LONGDOUBLE, ATTR_CONST_NOTHROW_LIST)
+DEF_LIB_BUILTIN (BUILT_IN_FREE, "free", BT_FN_VOID_PTR, ATTR_POINTER_NO_ESCAPE_LIST)
DEF_C99_C90RES_BUILTIN (BUILT_IN_ISINF, "isinf", BT_FN_INT_VAR, ATTR_CONST_NOTHROW_LIST)
DEF_EXT_LIB_BUILTIN (BUILT_IN_ISINFF, "isinff", BT_FN_INT_FLOAT, ATTR_CONST_NOTHROW_LIST)
DEF_EXT_LIB_BUILTIN (BUILT_IN_ISINFL, "isinfl", BT_FN_INT_LONGDOUBLE, ATTR_CONST_NOTHROW_LIST)
diff -r -up -x CVS ../gccSBaseline/gcc/calls.c gcc/calls.c
--- ../gccSBaseline/gcc/calls.c 2005-01-21 22:01:26.000000000 -0500
+++ gcc/calls.c 2005-01-22 15:58:21.000000000 -0500
@@ -578,6 +578,10 @@ flags_from_decl_or_type (tree exp)
if (DECL_IS_MALLOC (exp))
flags |= ECF_MALLOC;
+ /* The function exp may have the `pointer_no_escape' attribute. */
+ if (DECL_IS_POINTER_NO_ESCAPE (exp))
+ flags |= ECF_POINTER_NO_ESCAPE;
+
/* The function exp may have the `pure' attribute. */
if (DECL_IS_PURE (exp))
flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
diff -r -up -x CVS ../gccSBaseline/gcc/c-common.c gcc/c-common.c
--- ../gccSBaseline/gcc/c-common.c 2005-01-21 22:01:14.000000000 -0500
+++ gcc/c-common.c 2005-01-22 15:58:13.000000000 -0500
@@ -550,6 +550,7 @@ static tree handle_tls_model_attribute (
static tree handle_no_instrument_function_attribute (tree *, tree,
tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
+static tree handle_pointer_no_escape_attribute (tree *, tree, tree, int, bool *);
static tree handle_no_limit_stack_attribute (tree *, tree, tree, int,
bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
@@ -618,6 +619,8 @@ const struct attribute_spec c_common_att
handle_alias_attribute },
{ "no_instrument_function", 0, 0, true, false, false,
handle_no_instrument_function_attribute },
+ { "pointer_no_escape", 0, 0, true, false, false,
+ handle_pointer_no_escape_attribute },
{ "malloc", 0, 0, true, false, false,
handle_malloc_attribute },
{ "no_stack_limit", 0, 0, true, false, false,
@@ -4754,6 +4757,25 @@ handle_malloc_attribute (tree *node, tre
return NULL_TREE;
}
+/* Handle a "pointer_no_escape" attribute; arguments as in
+ struct attribute_spec.handler. */
+
+static tree
+handle_pointer_no_escape_attribute (tree *node, tree name, tree ARG_UNUSED (args),
+ int ARG_UNUSED (flags), bool *no_add_attrs)
+{
+ if (TREE_CODE (*node) == FUNCTION_DECL)
+ DECL_IS_POINTER_NO_ESCAPE (*node) = 1;
+ /* ??? TODO: Support types. */
+ else
+ {
+ warning ("%qs attribute ignored", IDENTIFIER_POINTER (name));
+ *no_add_attrs = true;
+ }
+
+ return NULL_TREE;
+}
+
/* Handle a "no_limit_stack" attribute; arguments as in
struct attribute_spec.handler. */
diff -r -up -x CVS ../gccSBaseline/gcc/c-decl.c gcc/c-decl.c
--- ../gccSBaseline/gcc/c-decl.c 2005-01-21 22:01:19.000000000 -0500
+++ gcc/c-decl.c 2005-01-22 15:58:16.000000000 -0500
@@ -1733,6 +1733,7 @@ merge_decls (tree newdecl, tree olddecl,
TREE_THIS_VOLATILE (newdecl) |= TREE_THIS_VOLATILE (olddecl);
TREE_READONLY (newdecl) |= TREE_READONLY (olddecl);
DECL_IS_MALLOC (newdecl) |= DECL_IS_MALLOC (olddecl);
+ DECL_IS_POINTER_NO_ESCAPE (newdecl) |= DECL_IS_POINTER_NO_ESCAPE (olddecl);
DECL_IS_PURE (newdecl) |= DECL_IS_PURE (olddecl);
}
diff -r -up -x CVS ../gccSBaseline/gcc/cgraph.c gcc/cgraph.c
--- ../gccSBaseline/gcc/cgraph.c 2005-01-21 22:01:28.000000000 -0500
+++ gcc/cgraph.c 2005-01-22 15:58:24.000000000 -0500
@@ -434,7 +434,7 @@ cgraph_indirect_assign_edge (struct cgra
struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
- tree call_expr)
+ tree call_expr, gcov_type count, int nest)
{
struct cgraph_edge *edge = ggc_alloc (sizeof (struct cgraph_edge));
#ifdef ENABLE_CHECKING
@@ -468,8 +468,8 @@ cgraph_create_edge (struct cgraph_node *
NEXT_INDIRECT_CALL (edge) = NULL;
caller->callees = edge;
callee->callers = edge;
- edge->count = caller->current_basic_block
- ? caller->current_basic_block->count : 0;
+ edge->count = count;
+ edge->loop_nest = nest;
return edge;
}
@@ -767,6 +767,8 @@ dump_cgraph_node (FILE *f, struct cgraph
if (edge->count)
fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
(HOST_WIDEST_INT)edge->count);
+ if (edge->loop_nest)
+ fprintf (f, "(nested in %i loops) ", edge->loop_nest);
}
fprintf (f, "\n calls: ");
@@ -779,6 +781,8 @@ dump_cgraph_node (FILE *f, struct cgraph
if (edge->count)
fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
(HOST_WIDEST_INT)edge->count);
+ if (edge->loop_nest)
+ fprintf (f, "(nested in %i loops) ", edge->loop_nest);
}
fprintf (f, "\n cycle: ");
n = node->next_cycle;
@@ -1027,12 +1031,15 @@ cgraph_function_possibly_inlined_p (tree
/* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
- tree call_expr, int count_scale)
+ tree call_expr, int count_scale, int loop_nest)
{
- struct cgraph_edge *new = cgraph_create_edge (n, e->callee, call_expr);
+ struct cgraph_edge *new;
+
+ new = cgraph_create_edge (n, e->callee, call_expr,
+ e->count * count_scale / REG_BR_PROB_BASE,
+ e->loop_nest + loop_nest);
new->inline_failed = e->inline_failed;
- new->count = e->count * count_scale / REG_BR_PROB_BASE;
e->count -= new->count;
return new;
}
@@ -1040,7 +1047,7 @@ cgraph_clone_edge (struct cgraph_edge *e
/* Create node representing clone of N executed COUNT times. Decrease
the execution counts from original node too. */
struct cgraph_node *
-cgraph_clone_node (struct cgraph_node *n, gcov_type count)
+cgraph_clone_node (struct cgraph_node *n, gcov_type count, int loop_nest)
{
struct cgraph_node *new = cgraph_create_node ();
struct cgraph_edge *e;
@@ -1067,7 +1074,7 @@ cgraph_clone_node (struct cgraph_node *n
n->count -= count;
for (e = n->callees;e; e=e->next_callee)
- cgraph_clone_edge (e, new, e->call_expr, count_scale);
+ cgraph_clone_edge (e, new, e->call_expr, count_scale, loop_nest);
new->next_clone = n->next_clone;
n->next_clone = new;
diff -r -up -x CVS ../gccSBaseline/gcc/cgraph.h gcc/cgraph.h
--- ../gccSBaseline/gcc/cgraph.h 2005-01-21 22:01:29.000000000 -0500
+++ gcc/cgraph.h 2005-01-22 20:56:46.000000000 -0500
@@ -160,7 +160,7 @@ struct cgraph_node GTY((chain_next ("%h.
bool analyzed;
/* Set when function is scheduled to be assembled. */
bool output;
- /* Used only while constructing the callgraph. */
+ /* FKZ HACK Used only while constructing the callgraph. */
basic_block current_basic_block;
};
@@ -180,6 +180,8 @@ struct cgraph_edge GTY((chain_next ("%h.
const char *inline_failed;
/* Expected number of executions: calculated in profile.c. */
gcov_type count;
+ /* Depth of loop nest, 1 means no loop nest. */
+ int loop_nest;
};
/* The cgraph_varpool data structure.
@@ -238,7 +240,7 @@ void cgraph_remove_edge (struct cgraph_e
void cgraph_remove_node (struct cgraph_node *);
struct cgraph_edge *cgraph_create_edge (struct cgraph_node *,
struct cgraph_node *,
- tree);
+ tree, gcov_type, int);
struct cgraph_edge *cgraph_indirect_assign_edge (struct cgraph_node *,
tree, tree);
struct cgraph_edge *cgraph_indirect_call_edge (struct cgraph_node *,
@@ -251,8 +253,8 @@ struct cgraph_local_info *cgraph_local_i
struct cgraph_global_info *cgraph_global_info (tree);
struct cgraph_rtl_info *cgraph_rtl_info (tree);
const char * cgraph_node_name (struct cgraph_node *);
-struct cgraph_edge * cgraph_clone_edge (struct cgraph_edge *, struct cgraph_node *, tree, int);
-struct cgraph_node * cgraph_clone_node (struct cgraph_node *, gcov_type);
+struct cgraph_edge * cgraph_clone_edge (struct cgraph_edge *, struct cgraph_node *, tree, int, int);
+struct cgraph_node * cgraph_clone_node (struct cgraph_node *, gcov_type, int);
struct cgraph_varpool_node *cgraph_varpool_node (tree);
struct cgraph_varpool_node *cgraph_varpool_node_for_asm (tree asmname);
diff -r -up -x CVS ../gccSBaseline/gcc/cgraphunit.c gcc/cgraphunit.c
--- ../gccSBaseline/gcc/cgraphunit.c 2005-01-21 22:01:29.000000000 -0500
+++ gcc/cgraphunit.c 2005-01-22 17:06:44.000000000 -0500
@@ -167,6 +167,7 @@ Software Foundation, 59 Temple Place - S
#include "tree-gimple.h"
#include "output.h"
#include "tree-pass.h"
+#include "cfgloop.h"
static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
@@ -407,6 +408,9 @@ cgraph_lower_function (struct cgraph_nod
node->lowered = true;
}
+/* Used only while constructing the callgraph. */
+static basic_block current_basic_block;
+
/* Walk tree and record all calls. Called via walk_tree. */
static tree
record_call_1 (tree *tp, int *walk_subtrees, void *data)
@@ -444,7 +448,9 @@ record_call_1 (tree *tp, int *walk_subtr
tree decl = get_callee_fndecl (*tp);
if (decl && TREE_CODE (decl) == FUNCTION_DECL)
{
- cgraph_create_edge (data, cgraph_node (decl), *tp);
+ cgraph_create_edge (data, cgraph_node (decl), *tp,
+ current_basic_block->count,
+ current_basic_block->loop_depth);
/* When we see a function call, we don't want to look at the
function reference in the ADDR_EXPR that is hanging from
@@ -545,6 +551,8 @@ static void
cgraph_create_edges (struct cgraph_node *node, tree body)
{
visited_nodes = pointer_set_create ();
+ current_basic_block = NULL;
+
if (TREE_CODE (body) == FUNCTION_DECL)
{
struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
@@ -555,10 +563,10 @@ cgraph_create_edges (struct cgraph_node
enclosing basic-blocks in the call edges. */
FOR_EACH_BB_FN (this_block, this_cfun)
{
- node->current_basic_block = this_block;
+ current_basic_block = this_block;
walk_tree (&this_block->stmt_list, record_call_1, node, visited_nodes);
}
- node->current_basic_block = (basic_block)0;
+ current_basic_block = NULL;
/* Walk over any private statics that may take addresses of functions. */
if (TREE_CODE (DECL_INITIAL (body)) == BLOCK)
@@ -820,6 +828,7 @@ static void
cgraph_analyze_function (struct cgraph_node *node)
{
tree decl = node->decl;
+ struct loops loops;
timevar_push (TV_IPA_ANALYSIS);
push_cfun (DECL_STRUCT_FUNCTION (decl));
@@ -831,8 +840,12 @@ cgraph_analyze_function (struct cgraph_n
node->count = ENTRY_BLOCK_PTR->count;
- /* First kill forward declaration so reverse inlining works properly. */
+ if (optimize)
+ flow_loops_find (&loops, LOOP_TREE);
cgraph_create_edges (node, decl);
+ if (optimize)
+ flow_loops_free (&loops);
+ free_dominance_info (CDI_DOMINATORS);
/* Only optimization we do in non-unit-at-a-time mode is inlining. We don't
use the passmanager then and instead call it directly. Since we probably
@@ -1220,7 +1233,8 @@ cgraph_optimize (void)
if (node->analyzed)
ipa_analyze_function (node);
for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
- ipa_analyze_variable (vnode);
+ if (!vnode->non_ipa)
+ ipa_analyze_variable (vnode);
if (flag_ipa_cp && flag_ipa_no_cloning)
ipcp_driver ();
@@ -1231,14 +1245,16 @@ cgraph_optimize (void)
dump_varpool (cgraph_dump_file);
}
-
if (flag_peel_structs)
peel_structs ();
if (flag_matrix_flattening)
matrix_reorg ();
+ bitmap_obstack_initialize (NULL);
+
ipa_passes ();
+ bitmap_obstack_release (NULL);
/* FIXME: this should be unnecesary if inliner took care of removing dead
functions. */
cgraph_remove_unreachable_nodes (false, dump_file);
@@ -1259,7 +1275,6 @@ cgraph_optimize (void)
#endif
cgraph_mark_functions_to_output ();
-
cgraph_expand_all_functions ();
cgraph_varpool_assemble_pending_decls ();
diff -r -up -x CVS ../gccSBaseline/gcc/c-typeck.c gcc/c-typeck.c
--- ../gccSBaseline/gcc/c-typeck.c 2005-01-21 22:01:25.000000000 -0500
+++ gcc/c-typeck.c 2005-01-22 15:58:20.000000000 -0500
@@ -1,6 +1,6 @@
/* Build expressions with type checking for C compiler.
- Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
+ 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
@@ -1769,9 +1769,11 @@ build_array_ref (tree array, tree index)
else if (!flag_isoc99 && !lvalue_p (foo))
pedwarn ("ISO C90 forbids subscripting non-lvalue array");
}
-
+
type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (array)));
rval = build4 (ARRAY_REF, type, array, index, NULL_TREE, NULL_TREE);
+
+
/* Array ref is const/volatile if the array elements are
or if the array is. */
TREE_READONLY (rval)
@@ -1792,15 +1794,33 @@ build_array_ref (tree array, tree index)
else
{
tree ar = default_conversion (array);
-
+ tree type;
+ tree res;
if (ar == error_mark_node)
return ar;
gcc_assert (TREE_CODE (TREE_TYPE (ar)) == POINTER_TYPE);
gcc_assert (TREE_CODE (TREE_TYPE (TREE_TYPE (ar))) != FUNCTION_TYPE);
+ type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (ar)));
- return build_indirect_ref (build_binary_op (PLUS_EXPR, ar, index, 0),
- "array indexing");
+ /* If this type has no size, either we're screwed or we've issued an
+ error, so it doesn't matter if we build a MEM_REF here or not. */
+ if (TYPE_SIZE_UNIT (type)
+ && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
+ {
+ res = build2 (MEM_REF, type, ar, index);
+ TREE_READONLY (res) = TYPE_READONLY (TREE_TYPE (TREE_TYPE (ar)));
+ TREE_SIDE_EFFECTS (res)
+ |= (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (ar)))
+ | TREE_SIDE_EFFECTS (ar));
+ TREE_THIS_VOLATILE (res)
+ |= (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (ar)))
+ | TREE_THIS_VOLATILE (ar));
+ return res;
+ }
+ else
+ return build_indirect_ref (build_binary_op (PLUS_EXPR, ar, index, 0),
+ "array indexing");
}
}
@@ -2715,7 +2735,16 @@ build_unary_op (enum tree_code code, tre
return build_binary_op (PLUS_EXPR, TREE_OPERAND (arg, 0),
TREE_OPERAND (arg, 1), 1);
}
-
+ /* Same for the equivalent MEM_REF */
+ if (TREE_CODE (arg) == MEM_REF)
+ {
+ if (!c_mark_addressable (MEM_REF_SYMBOL (arg)))
+ return error_mark_node;
+ return build_binary_op (PLUS_EXPR,
+ MEM_REF_SYMBOL (arg),
+ MEM_REF_INDEX (arg), 1);
+ }
+
/* Anything not already handled and not a true memory reference
or a non-lvalue array is an error. */
else if (typecode != FUNCTION_TYPE && !flag
@@ -2792,6 +2821,7 @@ lvalue_p (tree ref)
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
+ case MEM_REF:
case ERROR_MARK:
return (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
&& TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE);
diff -r -up -x CVS ../gccSBaseline/gcc/emit-rtl.c gcc/emit-rtl.c
--- ../gccSBaseline/gcc/emit-rtl.c 2005-01-21 22:01:58.000000000 -0500
+++ gcc/emit-rtl.c 2005-01-22 15:58:47.000000000 -0500
@@ -1509,6 +1509,7 @@ set_mem_attributes_minus_bitpos (rtx ref
this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
if (objectp || TREE_CODE (t) == INDIRECT_REF
|| TREE_CODE (t) == ALIGN_INDIRECT_REF
+ || TREE_CODE (t) == MEM_REF
|| TYPE_ALIGN_OK (type))
align = MAX (align, TYPE_ALIGN (type));
else
diff -r -up -x CVS ../gccSBaseline/gcc/expr.c gcc/expr.c
--- ../gccSBaseline/gcc/expr.c 2005-01-21 22:02:03.000000000 -0500
+++ gcc/expr.c 2005-01-22 15:58:50.000000000 -0500
@@ -5346,7 +5346,6 @@ get_inner_reference (tree exp, HOST_WIDE
/* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
}
break;
-
case ARRAY_REF:
case ARRAY_RANGE_REF:
{
@@ -6783,7 +6782,8 @@ expand_expr_real_1 (tree exp, rtx target
return temp;
}
-
+ case MEM_REF:
+ gcc_unreachable ();
case ARRAY_REF:
{
diff -r -up -x CVS ../gccSBaseline/gcc/gimple-low.c gcc/gimple-low.c
--- ../gccSBaseline/gcc/gimple-low.c 2005-01-21 22:02:15.000000000 -0500
+++ gcc/gimple-low.c 2005-01-22 15:58:59.000000000 -0500
@@ -41,7 +41,7 @@ Software Foundation, 59 Temple Place - S
#include "expr.h"
#include "toplev.h"
#include "tree-pass.h"
-
+#include "pointer-set.h"
struct lower_data
{
/* Block the current statement belongs to. */
@@ -606,3 +606,88 @@ struct tree_opt_pass pass_mark_used_bloc
TODO_dump_func, /* todo_flags_finish */
0 /* letter */
};
+
+/* Lower a MEM_REF tree to its equivalent INDIRECT_REF form. TP is a
+ pointer to the tree we are currently walking, and DATA is a pointer
+ to its block_stmt_iterator, used for inserting whatever
+ expressions are necessary to create GIMPLE from it. */
+
+static tree
+lower_memref (tree *tp,
+ int *walk_subtrees ATTRIBUTE_UNUSED,
+ void *data)
+{
+ block_stmt_iterator *bsip = (block_stmt_iterator *)data;
+ if (TREE_CODE (*tp) == MEM_REF)
+ {
+ tree indirect;
+ tree stmts;
+ tree with;
+ with = build2 (MULT_EXPR, TREE_TYPE (MEM_REF_INDEX (*tp)),
+ MEM_REF_INDEX (*tp),
+ size_in_bytes (TREE_TYPE (TREE_TYPE (MEM_REF_SYMBOL (*tp)))));
+ with = fold_convert (TREE_TYPE (MEM_REF_SYMBOL (*tp)), with);
+ with = build2 (PLUS_EXPR, TREE_TYPE (MEM_REF_SYMBOL (*tp)),
+ MEM_REF_SYMBOL (*tp), with);
+
+
+ with = force_gimple_operand (with, &stmts, false, NULL_TREE);
+ if (stmts)
+ bsi_insert_before (bsip, stmts, BSI_SAME_STMT);
+
+ indirect = build1 (INDIRECT_REF, TREE_TYPE (*tp), with);
+ TREE_READONLY (indirect) = TREE_READONLY (*tp);
+ TREE_SIDE_EFFECTS (indirect) = TREE_SIDE_EFFECTS (*tp);
+ TREE_THIS_VOLATILE (indirect) = TREE_THIS_VOLATILE (*tp);
+
+ indirect = force_gimple_operand (indirect, &stmts, false, NULL_TREE);
+ if (stmts)
+ bsi_insert_before (bsip, stmts, BSI_SAME_STMT);
+ *tp = indirect;
+ }
+ return NULL_TREE;
+}
+
+/* Convert MEM_REF trees into their equivalent INDIRECT_REF form
+ across the entire function. MEM_REF (symbol, index) = INDIRECT_REF
+ (symbol + (index * size in bytes of the type symbol points to)) */
+
+static void
+lower_memrefs (void)
+{
+ struct pointer_set_t *visited_nodes;
+ basic_block bb;
+ FOR_ALL_BB (bb)
+ {
+ block_stmt_iterator bsi;
+ for (bsi = bsi_start (bb);
+ !bsi_end_p (bsi);
+ bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+ visited_nodes = pointer_set_create ();
+ walk_tree (&stmt, lower_memref, (void *)&bsi, visited_nodes);
+ pointer_set_destroy (visited_nodes);
+
+ }
+ }
+}
+struct tree_opt_pass pass_lower_memref =
+{
+ "memrefs", /* name */
+ NULL, /* gate */
+ NULL, NULL, /* IPA analysis */
+ lower_memrefs, /* execute */
+ NULL, NULL, /* IPA analysis */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
+};
+
diff -r -up -x CVS ../gccSBaseline/gcc/gimplify.c gcc/gimplify.c
--- ../gccSBaseline/gcc/gimplify.c 2005-01-21 22:02:16.000000000 -0500
+++ gcc/gimplify.c 2005-01-22 15:59:00.000000000 -0500
@@ -3765,7 +3765,18 @@ gimplify_expr (tree *expr_p, tree *pre_p
is_gimple_val, fb_rvalue);
recalculate_side_effects (*expr_p);
break;
-
+ case MEM_REF:
+ {
+ enum gimplify_status r0, r1;
+
+ r0 = gimplify_expr (&MEM_REF_SYMBOL (*expr_p), pre_p, post_p,
+ is_gimple_reg, fb_rvalue);
+ r1 = gimplify_expr (&MEM_REF_INDEX (*expr_p), pre_p, post_p,
+ is_gimple_reg, fb_rvalue);
+ recalculate_side_effects (*expr_p);
+ ret = MIN (r0, r1);
+ }
+ break;
case ALIGN_INDIRECT_REF:
case MISALIGNED_INDIRECT_REF:
case INDIRECT_REF:
@@ -3826,7 +3837,6 @@ gimplify_expr (tree *expr_p, tree *pre_p
ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
NULL, is_gimple_val, fb_rvalue);
break;
-
case LABEL_EXPR:
ret = GS_ALL_DONE;
gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
@@ -4347,6 +4357,7 @@ check_pointer_types_r (tree *tp, int *wa
switch (TREE_CODE (t))
{
case INDIRECT_REF:
+ case MEM_REF:
case ARRAY_REF:
otype = TREE_TYPE (t);
ptype = TREE_TYPE (TREE_OPERAND (t, 0));
diff -r -up -x CVS ../gccSBaseline/gcc/ipa-inline.c gcc/ipa-inline.c
--- ../gccSBaseline/gcc/ipa-inline.c 2004-12-09 15:10:27.000000000 -0500
+++ gcc/ipa-inline.c 2005-01-22 15:59:02.000000000 -0500
@@ -126,7 +126,7 @@ cgraph_clone_inlined_nodes (struct cgrap
}
else if (duplicate)
{
- n = cgraph_clone_node (e->callee, e->count);
+ n = cgraph_clone_node (e->callee, e->count, e->loop_nest);
cgraph_redirect_edge_callee (e, n);
}
@@ -349,13 +349,16 @@ cgraph_edge_badness (struct cgraph_edge
}
else
{
- int growth = cgraph_estimate_growth (edge->callee);
+ int nest = MIN (edge->loop_nest, 8);
+ int badness = cgraph_estimate_growth (edge->callee) * 256;
+
+ badness >>= nest;
/* Make recursive inlining happen always after other inlining is done. */
if (cgraph_recursive_inlining_p (edge->caller, edge->callee, NULL))
- return growth + 1;
+ return badness + 1;
else
- return growth;
+ return badness;
}
}
@@ -472,7 +475,7 @@ cgraph_decide_recursive_inlining (struct
cgraph_node_name (node));
/* We need original clone to copy around. */
- master_clone = cgraph_clone_node (node, 0);
+ master_clone = cgraph_clone_node (node, 0, 1);
master_clone->needed = true;
for (e = master_clone->callees; e; e = e->next_callee)
if (!e->inline_failed)
diff -r -up -x CVS ../gccSBaseline/gcc/ipa-static.h gcc/ipa-static.h
--- ../gccSBaseline/gcc/ipa-static.h 2005-01-21 22:02:18.000000000 -0500
+++ gcc/ipa-static.h 2005-01-22 15:59:02.000000000 -0500
@@ -120,5 +120,10 @@ bitmap ipa_get_statics_read_global (tree
bitmap ipa_get_statics_written_global (tree fn);
bitmap ipa_get_statics_not_read_global (tree fn);
bitmap ipa_get_statics_not_written_global (tree fn);
+bool ipa_static_type_contained_p (tree type);
+bool ipa_static_address_not_taken_of_field (tree record_type, tree field_type);
+int ipa_static_star_count_of_interesting_type (tree type);
+int ipa_static_star_count_of_interesting_or_array_type (tree type);
+
#endif /* GCC_IPA_STATIC_H */
diff -r -up -x CVS ../gccSBaseline/gcc/ipa-static-vars-anal.c gcc/ipa-static-vars-anal.c
--- ../gccSBaseline/gcc/ipa-static-vars-anal.c 2005-01-21 22:02:18.000000000 -0500
+++ gcc/ipa-static-vars-anal.c 2005-01-22 15:59:02.000000000 -0500
@@ -69,6 +69,8 @@ Software Foundation, 59 Temple Place - S
#include "output.h"
#include "flags.h"
#include "timevar.h"
+#include "diagnostic.h"
+#include "langhooks.h"
/* FIXME -- PROFILE-RESTRUCTURE: change comment from DECL_UID to var-ann. */
/* This splay tree contains all of the static variables that are
@@ -95,6 +97,65 @@ static bitmap module_statics_written;
code is found that clobbers all memory. */
static bitmap all_module_statics;
+/* This bitmap contains the set of local vars that are the lhs of
+ calls to mallocs. When these variables are seen on the rhs as part
+ of a cast, the casts are not marked as doing bad things to the type
+ even though they are generally of the form
+ "foo = (type_of_foo)void_temp". */
+static bitmap results_of_malloc;
+
+/* Scratch bitmap for avoiding work. */
+static bitmap been_there_done_that;
+
+/* There are two levels of escape that types can undergo.
+
+ EXPOSED_PARAMETER - some instance of the variable is
+ passed by value into an externally visible function or some
+ instance of the variable is passed out of an externally visible
+ function as a return value. In this case any of the fields of the
+ variable that are pointer types end up having their types marked as
+ FULL_ESCAPE.
+
+ FULL_ESCAPE - when bad things happen to good types. One of the
+ following things happens to the type: (a) either an instance of the
+ variable has it's address passed to an externally visible function,
+ (b) the address is taken and some bad cast happens to the address
+ or (c) explicit arithmetic is done to the address.
+*/
+
+enum escape_t
+{
+ EXPOSED_PARAMETER,
+ FULL_ESCAPE
+};
+
+/* The following two bit vectors global_types_* correspond to
+ previous cases above. During the analysis phase, a bit is set in
+ one of these vectors if an operation of the offending class is
+ discovered to happen on the associated type. */
+
+static bitmap global_types_exposed_parameter;
+static bitmap global_types_full_escape;
+
+/* All of the types seen in this compilation unit. */
+static bitmap global_types_seen;
+static splay_tree uid_to_type;
+
+/* Map the several instances of a type into a single instance. These
+ can arise in several ways, none of which can be justified except by
+ laziness and stupidity. */
+static splay_tree uid_to_unique_type;
+static splay_tree all_unique_types;
+
+/* A splay tree of bitmaps. An element X in the splay tree has a bit
+ set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (Y)) if there was
+ an operation in the program of the form "&X.Y". */
+static splay_tree uid_to_addressof_map;
+
+/* Tree to hold the subtype maps used to mark subtypes of escaped
+ types. */
+static splay_tree uid_to_subtype_map;
+
/* Records tree nodes seen in cgraph_create_edges. Simply using
walk_tree_without_duplicates doesn't guarantee each node is visited
once because it gets a new htab upon each recursive call from
@@ -134,6 +195,36 @@ print_order (FILE* out,
fflush(out);
}
+/* All of the "unique_type" code is a hack to get around the sleazy
+ implementation used to compile more than one file. If the same type is
+ declared in several files, multiple types will appear that are the
+ same. The code in this unit chooses one "unique" instance of that
+ type as the representative and has all of the others point to
+ it. */
+
+/* Find the unique representative for a type with UID. */
+static int
+unique_type_id_for (int uid)
+{
+ splay_tree_node result =
+ splay_tree_lookup(uid_to_unique_type, (splay_tree_key) uid);
+
+ if (result)
+ return TYPE_UID((tree) result->value);
+ else
+ {
+ abort();
+ return uid;
+ }
+}
+
+/* Return true if the type with UID is the unique representative. */
+static bool
+unique_type_id_p (int uid)
+{
+ return uid == unique_type_id_for (uid);
+}
+
/* FIXME -- PROFILE-RESTRUCTURE: Remove this function, it becomes a nop. */
/* Convert IN_DECL bitmap which is indexed by DECL_UID to IN_ANN, a
bitmap indexed by var_ann (VAR_DECL)->uid. */
@@ -153,7 +244,7 @@ convert_UIDs_in_bitmap (bitmap in_ann, b
tree t = (tree)n->value;
var_ann_t va = var_ann (t);
if (va)
- bitmap_set_bit(in_ann, va->uid);
+ bitmap_set_bit (in_ann, va->uid);
}
}
}
@@ -354,6 +445,125 @@ ipa_get_statics_not_written_global (tree
return NULL;
}
+/* Return 0 if TYPE is a record or union type. Return a positive
+ number if TYPE is a pointer to a record or union. The number is
+ the number of pointer types stripped to get to the record or union
+ type. Return -1 if TYPE is none of the above. */
+
+int
+ipa_static_star_count_of_interesting_type (tree type)
+{
+ int count = 0;
+ /* Strip the *'s off. */
+ while (POINTER_TYPE_P (type))
+ {
+ type = TREE_TYPE (type);
+ count++;
+ }
+
+ /* We are interested in records, and unions only. */
+ if (TREE_CODE (type) == RECORD_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE
+ || TREE_CODE (type) == UNION_TYPE)
+ return count;
+ else
+ return -1;
+}
+
+
+/* Return 0 if TYPE is a record or union type. Return a positive
+ number if TYPE is a pointer to a record or union. The number is
+ the number of pointer types stripped to get to the record or union
+ type. Return -1 if TYPE is none of the above. */
+
+int
+ipa_static_star_count_of_interesting_or_array_type (tree type)
+{
+ int count = 0;
+ /* Strip the *'s off. */
+ while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
+ {
+ type = TREE_TYPE (type);
+ count++;
+ }
+
+ /* We are interested in records, and unions only. */
+ if (TREE_CODE (type) == RECORD_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE
+ || TREE_CODE (type) == UNION_TYPE)
+ return count;
+ else
+ return -1;
+}
+
+
+/* Return true if the record, or union TYPE passed in escapes this
+ compilation unit. */
+
+bool
+ipa_static_type_contained_p (tree type)
+{
+ int uid;
+
+ if (initialization_status == UNINITIALIZED)
+ return false;
+
+ while (POINTER_TYPE_P (type))
+ type = TREE_TYPE (type);
+
+ type = TYPE_MAIN_VARIANT (type);
+ uid = unique_type_id_for (TYPE_UID (type));
+ return bitmap_bit_p (global_types_full_escape, uid);
+}
+
+/* Return true if no fields with type FIELD_TYPE within a record of
+ RECORD_TYPE has its address taken. */
+
+bool
+ipa_static_address_not_taken_of_field (tree record_type, tree field_type)
+{
+ splay_tree_node result;
+ int uid;
+
+ if (initialization_status == UNINITIALIZED)
+ return false;
+
+ /* Strip off all of the pointer tos on the record type. Strip the
+ same number of pointer tos from the field type. If the field
+ type has fewer, it could not have been aliased. */
+ while (POINTER_TYPE_P (record_type))
+ {
+ record_type = TREE_TYPE (record_type);
+ if (POINTER_TYPE_P (field_type))
+ field_type = TREE_TYPE (field_type);
+ else
+ return true;
+ }
+
+ /* The record type must be contained. The field type may
+ escape. */
+ if (!ipa_static_type_contained_p (record_type))
+ return false;
+
+ record_type = TYPE_MAIN_VARIANT (record_type);
+ uid = unique_type_id_for (TYPE_UID (record_type));
+ result = splay_tree_lookup (uid_to_addressof_map, (splay_tree_key) uid);
+
+ if (result)
+ {
+ bitmap field_type_map = (bitmap) result->value;
+ field_type = TYPE_MAIN_VARIANT (field_type);
+ uid = unique_type_id_for (TYPE_UID (field_type));
+ /* If the bit is there, the address was taken. If not, it
+ wasn't. */
+ return !bitmap_bit_p (field_type_map, uid);
+ }
+ else
+ /* No bitmap means no addresses were taken. */
+ return true;
+}
+
+
struct searchc_env {
struct cgraph_node **stack;
int stack_size;
@@ -478,7 +688,8 @@ reduced_inorder (struct cgraph_node **or
node->next_cycle = NULL;
splay_tree_insert (env.nodes_marked_new,
- node->uid, (splay_tree_value)node);
+ (splay_tree_key)node->uid,
+ (splay_tree_value)node);
}
else
node->aux = NULL;
@@ -500,21 +711,340 @@ reduced_inorder (struct cgraph_node **or
}
return env.order_pos;
}
+
+
+
+/* Mark a TYPE as being seen. This is only called from two places:
+ mark_type_seen which only calls it with record and union types and
+ mark_interesting_addressof which can mark any field type. */
+
+static bool
+mark_any_type_seen (tree type)
+{
+ int uid;
+
+ type = TYPE_MAIN_VARIANT (type);
+ uid = TYPE_UID (type);
+ if (bitmap_bit_p (global_types_seen, uid))
+ return false;
+ else
+ {
+ splay_tree_insert (uid_to_type,
+ (splay_tree_key) uid,
+ (splay_tree_value) type);
+ bitmap_set_bit (global_types_seen, uid);
+ }
+ return true;
+}
+
+/* Mark the underlying record or union type of TYPE as being seen.
+ Pointer tos and array ofs are stripped from the type and non record
+ or unions are not considered. */
+
+static bool
+mark_type_seen (tree type)
+{
+ while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
+ type = TREE_TYPE (type);
+
+ /* We are interested in records, and unions only. */
+ if (TREE_CODE (type) == RECORD_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE
+ || TREE_CODE (type) == UNION_TYPE)
+ return mark_any_type_seen (type);
+ else
+ return false;
+}
+
+/* Add TYPE to the suspect type set. Return true if the bit needed to
+ be marked. */
+
+static bool
+mark_type (tree type, enum escape_t escape_status)
+{
+ bitmap map = NULL;
+ int uid;
+
+ while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
+ type = TREE_TYPE (type);
+
+ switch (escape_status)
+ {
+ case EXPOSED_PARAMETER:
+ map = global_types_exposed_parameter;
+ break;
+ case FULL_ESCAPE:
+ map = global_types_full_escape;
+ break;
+ }
+
+ uid = TYPE_UID (TYPE_MAIN_VARIANT (type));
+ if (bitmap_bit_p (map, uid))
+ return false;
+ else
+ {
+ bitmap_set_bit (map, uid);
+ mark_type_seen (type);
+
+ if (escape_status == FULL_ESCAPE)
+ {
+	  /* Efficiency hack. When things are bad, do not mess around
+	     with this type anymore. */
+ bitmap_set_bit (global_types_exposed_parameter, uid);
+ }
+ }
+ return true;
+}
+
+/* Add interesting TYPE to the suspect type set. If the set is
+ EXPOSED_PARAMETER and the TYPE is a pointer type, the set is
+ changed to FULL_ESCAPE. */
+
+static void
+mark_interesting_type (tree type, enum escape_t escape_status)
+{
+ if (ipa_static_star_count_of_interesting_type (type) >= 0)
+ {
+ if ((escape_status == EXPOSED_PARAMETER)
+ && POINTER_TYPE_P (type))
+	/* EXPOSED_PARAMETERs are only structs or unions that are
+	   passed by value. Anything passed by reference to an external
+	   function fully exposes the type. */
+ mark_type (type, FULL_ESCAPE);
+ else
+ mark_type (type, escape_status);
+ }
+}
+
+/* Return true if PARENT is supertype of CHILD. Both types must be
+ known to be structures or unions. */
+
+static bool
+parent_type_p (tree parent, tree child)
+{
+ int i;
+ tree binfo, base_binfo;
+ if (TYPE_BINFO (parent))
+ for (binfo = TYPE_BINFO (parent), i = 0;
+ BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
+ {
+ tree binfotype = BINFO_TYPE (base_binfo);
+ if (binfotype == child)
+ return true;
+ else if (parent_type_p (binfotype, child))
+ return true;
+ }
+ if (TREE_CODE (parent) == UNION_TYPE
+ || TREE_CODE (parent) == QUAL_UNION_TYPE)
+ {
+ tree field;
+ /* Search all of the variants in the union to see if one of them
+ is the child. */
+ for (field = TYPE_FIELDS (parent);
+ field;
+ field = TREE_CHAIN (field))
+ {
+ tree field_type;
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
+
+ field_type = TREE_TYPE (field);
+ if (field_type == child)
+ return true;
+ }
+
+ /* If we did not find it, recursively ask the variants if one of
+ their children is the child type. */
+ for (field = TYPE_FIELDS (parent);
+ field;
+ field = TREE_CHAIN (field))
+ {
+ tree field_type;
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
+
+ field_type = TREE_TYPE (field);
+ if (TREE_CODE (field_type) == RECORD_TYPE
+ || TREE_CODE (field_type) == QUAL_UNION_TYPE
+ || TREE_CODE (field_type) == UNION_TYPE)
+ if (parent_type_p (field_type, child))
+ return true;
+ }
+ }
+
+ if (TREE_CODE (parent) == RECORD_TYPE)
+ {
+ tree field;
+ for (field = TYPE_FIELDS (parent);
+ field;
+ field = TREE_CHAIN (field))
+ {
+ tree field_type;
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
+
+ field_type = TREE_TYPE (field);
+ if (field_type == child)
+ return true;
+ /* You can only cast to the first field so if it does not
+ match, quit. */
+ if (TREE_CODE (field_type) == RECORD_TYPE
+ || TREE_CODE (field_type) == QUAL_UNION_TYPE
+ || TREE_CODE (field_type) == UNION_TYPE)
+ {
+ if (parent_type_p (field_type, child))
+ return true;
+ else
+ break;
+ }
+ }
+ }
+ return false;
+}
+
+/* Return the number of pointer tos for TYPE and return TYPE with all
+ of these stripped off. */
+
+static int
+count_stars (tree* type_ptr)
+{
+ tree type = *type_ptr;
+ int i = 0;
+ while (POINTER_TYPE_P (type))
+ {
+ type = TREE_TYPE (type);
+ i++;
+ }
+
+ *type_ptr = type;
+ return i;
+}
+
+enum cast_type {
+ CT_UP,
+ CT_DOWN,
+ CT_SIDEWAYS,
+ CT_USELESS
+};
+
+/* Check the cast FROM_TYPE to TO_TYPE. This function requires that
+ the two types have already passed the
+ ipa_static_star_count_of_interesting_type test. */
+
+static enum cast_type
+check_cast_type (tree to_type, tree from_type)
+{
+ int to_stars = count_stars (&to_type);
+ int from_stars = count_stars (&from_type);
+ if (to_stars != from_stars)
+ return CT_SIDEWAYS;
+
+ if (to_type == from_type)
+ return CT_USELESS;
+
+ if (parent_type_p (to_type, from_type)) return CT_UP;
+ if (parent_type_p (from_type, to_type)) return CT_DOWN;
+ return CT_SIDEWAYS;
+}
+
+/* Check a cast FROM this variable, TO_TYPE. Mark the escaping types
+ if appropriate. */
+static void
+check_cast (tree to_type, tree from)
+{
+ tree from_type = TYPE_MAIN_VARIANT (TREE_TYPE (from));
+ bool to_interesting_type, from_interesting_type;
+
+ to_type = TYPE_MAIN_VARIANT (to_type);
+ if (from_type == to_type)
+ {
+ mark_type_seen (to_type);
+ return;
+ }
+
+ to_interesting_type =
+ ipa_static_star_count_of_interesting_type (to_type) >= 0;
+ from_interesting_type =
+ ipa_static_star_count_of_interesting_type (from_type) >= 0;
+
+ if (to_interesting_type)
+ if (from_interesting_type)
+ {
+ /* Both types are interesting. This can be one of four types
+ of cast: useless, up, down, or sideways. We do not care
+ about up or useless. Sideways casts are always bad and
+ both sides get marked as escaping. Downcasts are not
+ interesting here because if type is marked as escaping, all
+	     of its subtypes escape. */
+ switch (check_cast_type (to_type, from_type))
+ {
+ case CT_UP:
+ case CT_USELESS:
+ case CT_DOWN:
+ mark_type_seen (to_type);
+ mark_type_seen (from_type);
+ break;
+
+ case CT_SIDEWAYS:
+ mark_type (to_type, FULL_ESCAPE);
+ mark_type (from_type, FULL_ESCAPE);
+ break;
+ }
+ }
+ else
+ {
+ /* If this is a cast from the local that is a result from a
+ call to malloc, do not mark the cast as bad. */
+ if (DECL_P (from) && !bitmap_bit_p (results_of_malloc, DECL_UID (from)))
+ mark_type (to_type, FULL_ESCAPE);
+ else
+ mark_type_seen (to_type);
+ }
+ else if (from_interesting_type)
+ mark_type (from_type, FULL_ESCAPE);
+}
+
+/* Register the parameter and return types of function FN as
+ escaping. */
+static void
+check_function_parameter_and_return_types (tree fn, bool escapes)
+{
+ tree arg;
+
+ for (arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
+ arg && TREE_VALUE (arg) != void_type_node;
+ arg = TREE_CHAIN (arg))
+ {
+ if (escapes)
+ mark_interesting_type (TREE_VALUE (arg), EXPOSED_PARAMETER);
+ else
+ mark_type_seen (TREE_VALUE (arg));
+ }
+
+ if (escapes)
+ mark_interesting_type (TREE_TYPE (TREE_TYPE (fn)), EXPOSED_PARAMETER);
+ else
+ mark_type_seen (TREE_TYPE (TREE_TYPE (fn)));
+}
/* Add VAR to all_module_statics and the two static_vars_to_consider*
sets. */
-static inline
-void add_static_var (tree var)
+static inline void
+add_static_var (tree var)
{
- /* FIXME -- PROFILE-RESTRUCTURE: Change the call from
- DECL_UID to get the uid from the var_ann field. */
- splay_tree_insert (static_vars_to_consider_by_uid,
- DECL_UID (var), (splay_tree_value)var);
-
- /* FIXME -- PROFILE-RESTRUCTURE: Change the call from
- DECL_UID to get the uid from the var_ann field. */
- bitmap_set_bit (all_module_statics, DECL_UID (var));
+ int uid = DECL_UID (var);
+ if (!bitmap_bit_p (all_module_statics, uid))
+ {
+ /* FIXME -- PROFILE-RESTRUCTURE: Change the call from
+ DECL_UID to get the uid from the var_ann field. */
+ splay_tree_insert (static_vars_to_consider_by_uid,
+ uid, (splay_tree_value)var);
+
+ /* FIXME -- PROFILE-RESTRUCTURE: Change the call from
+ DECL_UID to get the uid from the var_ann field. */
+ bitmap_set_bit (all_module_statics, uid);
+ }
}
/* FIXME this needs to be enhanced. If we are compiling a single
@@ -584,23 +1114,30 @@ check_operand (ipa_local_static_vars_inf
{
if (!t) return;
+ /* This is an assignment from a function, register the types as
+ escaping. */
+ if (TREE_CODE (t) == FUNCTION_DECL)
+ check_function_parameter_and_return_types (t, true);
+
/* FIXME -- PROFILE-RESTRUCTURE: Change the call from DECL_UID to
get the uid from the var_ann field. */
- if ((TREE_CODE (t) == VAR_DECL)
- && has_proper_scope_for_analysis (local, t, checking_write))
+ else if (TREE_CODE (t) == VAR_DECL)
{
- if (checking_write)
+ mark_type_seen (TREE_TYPE (t));
+ if (has_proper_scope_for_analysis (local, t, checking_write))
{
- if (local)
- bitmap_set_bit (local->statics_written_by_decl_uid, DECL_UID (t));
- /* Mark the write so we can tell which statics are
- readonly. */
- bitmap_set_bit (module_statics_written, DECL_UID (t));
+ if (checking_write)
+ {
+ if (local)
+ bitmap_set_bit (local->statics_written_by_decl_uid, DECL_UID (t));
+ /* Mark the write so we can tell which statics are
+ readonly. */
+ bitmap_set_bit (module_statics_written, DECL_UID (t));
+ }
+ else if (local)
+ bitmap_set_bit (local->statics_read_by_decl_uid, DECL_UID (t));
}
- else if (local)
- bitmap_set_bit (local->statics_read_by_decl_uid, DECL_UID (t));
}
- else return;
}
/* Examine tree T for references to static variables. All internal
@@ -626,7 +1163,7 @@ check_tree (ipa_local_static_vars_info_t
/* The bottom of an indirect reference can only be read, not
written. So just recurse and whatever we find, check it against
the read bitmaps. */
- if (INDIRECT_REF_P (t))
+ if (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
{
check_tree (local, TREE_OPERAND (t, 0), false);
@@ -644,10 +1181,68 @@ check_tree (ipa_local_static_vars_info_t
}
}
- if (SSA_VAR_P (t))
+ if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
check_operand (local, t, checking_write);
}
+/* Given a memory reference T, will return the variable at the bottom
+ of the access. Unlike get_base_address, this will recurse thru
+ INDIRECT_REFS. */
+
+static tree
+get_base_var (tree t)
+{
+ if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
+ return t;
+
+ while (!SSA_VAR_P (t)
+ && (!CONSTANT_CLASS_P (t))
+ && TREE_CODE (t) != LABEL_DECL
+ && TREE_CODE (t) != FUNCTION_DECL
+ && TREE_CODE (t) != CONST_DECL)
+ {
+ t = TREE_OPERAND (t, 0);
+ }
+ return t;
+}
+
+/* Create an address_of edge FROM_TYPE.TO_TYPE. */
+static void
+mark_interesting_addressof (tree to_type, tree from_type)
+{
+ from_type = TYPE_MAIN_VARIANT (from_type);
+ to_type = TYPE_MAIN_VARIANT (to_type);
+ if (ipa_static_star_count_of_interesting_type (from_type) == 0)
+ {
+ int uid = TYPE_UID (from_type);
+ bitmap from_type_map;
+ splay_tree_node result =
+ splay_tree_lookup (uid_to_addressof_map, (splay_tree_key) uid);
+
+ if (result)
+ from_type_map = (bitmap) result->value;
+ else
+ {
+ from_type_map = BITMAP_ALLOC (&ipa_obstack);
+ splay_tree_insert (uid_to_addressof_map,
+ uid,
+ (splay_tree_value)from_type_map);
+ }
+ bitmap_set_bit (from_type_map, TYPE_UID (to_type));
+ mark_type_seen (from_type);
+ mark_any_type_seen (to_type);
+ }
+ else
+ {
+ fprintf(stderr, "trying to mark the address of pointer type ");
+ print_generic_expr (stderr, from_type, 0);
+ fprintf(stderr, "\n");
+ abort ();
+ }
+}
+
+
+
/* Scan tree T to see if there are any addresses taken in within T. */
static void
@@ -656,6 +1251,25 @@ look_for_address_of (ipa_local_static_va
if (TREE_CODE (t) == ADDR_EXPR)
{
tree x = get_base_var (t);
+ tree cref = TREE_OPERAND (t, 0);
+
+      /* If we have an expression of the form "&a.b.c.d", mark a.b,
+	 b.c and c.d as having its address taken. */
+ tree fielddecl = NULL_TREE;
+ while (cref!= x)
+ {
+ if (TREE_CODE (cref) == COMPONENT_REF)
+ {
+ fielddecl = TREE_OPERAND (cref, 1);
+ mark_interesting_addressof (TREE_TYPE (fielddecl),
+ DECL_FIELD_CONTEXT (fielddecl));
+ }
+ else if (TREE_CODE (cref) == ARRAY_REF)
+ mark_type_seen (TREE_TYPE (cref));
+
+ cref = TREE_OPERAND (cref, 0);
+ }
+
if (TREE_CODE (x) == VAR_DECL)
{
if (has_proper_scope_for_analysis (local, x, false))
@@ -673,6 +1287,39 @@ look_for_address_of (ipa_local_static_va
}
+/* Scan tree T to see if there are any casts within it.
+ LHS Is the LHS of the expression involving the cast. */
+
+static void
+look_for_casts (tree lhs __attribute__((unused)), tree t)
+{
+ if (is_gimple_cast (t) || TREE_CODE (t) == VIEW_CONVERT_EXPR)
+ {
+ tree castfromvar = TREE_OPERAND (t, 0);
+ check_cast (TREE_TYPE (t), castfromvar);
+ }
+ else if (TREE_CODE (t) == COMPONENT_REF
+ || TREE_CODE (t) == INDIRECT_REF
+ || TREE_CODE (t) == BIT_FIELD_REF)
+ {
+ tree base = get_base_address (t);
+ while (t != base)
+ {
+ t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
+ {
+ /* This may be some part of a component ref.
+ IE it may be a.b.VIEW_CONVERT_EXPR<weird_type>(c).d, AFAIK.
+ castfromref will give you a.b.c, not a. */
+ tree castfromref = TREE_OPERAND (t, 0);
+ check_cast (TREE_TYPE (t), castfromref);
+ }
+ else if (TREE_CODE (t) == COMPONENT_REF)
+ mark_type_seen (TREE_TYPE (TREE_OPERAND (t, 1)));
+ }
+ }
+}
+
/* Check to see if T is a read or address of operation on a static var
we are interested in analyzing. LOCAL is passed in to get access to
its bit vectors. */
@@ -774,13 +1421,15 @@ get_asm_expr_operands (ipa_local_static_
parameter is the tree node for the caller and the second operand is
the tree node for the entire call expression. */
-static void
-process_call_for_static_vars(ipa_local_static_vars_info_t local, tree call_expr)
+static bool
+check_call (ipa_local_static_vars_info_t local,
+ tree call_expr)
{
int flags = call_expr_flags(call_expr);
tree operandList = TREE_OPERAND (call_expr, 1);
tree operand;
tree callee_t = get_callee_fndecl (call_expr);
+ tree argument;
struct cgraph_node* callee;
enum availability avail = AVAIL_NOT_AVAILABLE;
@@ -806,9 +1455,41 @@ process_call_for_static_vars(ipa_local_s
if (callee_t)
{
+      tree arg_type;
+      tree last_arg_type = NULL_TREE;
callee = cgraph_node(callee_t);
avail = cgraph_function_body_availability (callee);
+      /* If the function is POINTER_NO_ESCAPE or a wrapper it is
+	 allowed to make an implicit cast to void* without causing the
+	 type to escape.  */
+      if (!(flags & ECF_POINTER_NO_ESCAPE))
+	{
+	  /* Check that there are no implicit casts in the passing of
+	     parameters.  Walk the actual arguments in step with the
+	     prototype's argument types; stop early if the call site
+	     supplies fewer arguments than the prototype.  */
+	  operand = operandList;
+	  for (arg_type = TYPE_ARG_TYPES (TREE_TYPE (callee_t));
+	       arg_type && operand && TREE_VALUE (arg_type) != void_type_node;
+	       arg_type = TREE_CHAIN (arg_type))
+	    {
+	      argument = TREE_VALUE (operand);
+	      check_cast (TREE_VALUE (arg_type), argument);
+	      last_arg_type = arg_type;
+	      operand = TREE_CHAIN (operand);
+	    }
+
+	  /* In the case where we have a var_args function, we need to
+	     check the remaining parameters against the last argument.  */
+	  arg_type = last_arg_type;
+	  for (;
+	       arg_type && operand != NULL_TREE;
+	       operand = TREE_CHAIN (operand))
+	    {
+	      argument = TREE_VALUE (operand);
+	      check_cast (TREE_VALUE (arg_type), argument);
+	    }
+	}
+
/* When bad things happen to bad functions, they cannot be const
or pure. */
if (local && local->pure_const_not_set_in_source)
@@ -827,43 +1508,53 @@ process_call_for_static_vars(ipa_local_s
}
}
- /* If the callee has already been marked as ECF_CONST, we need look
- no further since it cannot look at any memory except
- constants. However, if the callee is only ECF_PURE we need to
- look because if there is also code, we need to mark the variables
- it is reading from. */
- if (flags & ECF_CONST)
- return;
-
- if (!local) return;
-
/* The callee is either unknown (indirect call) or there is just no
- scanable code for it (external call) . We look to see if there
+ scannable code for it (external call) . We look to see if there
are any bits available for the callee (such as by declaration or
because it is builtin) and process solely on the basis of those
bits. */
if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
{
- if (flags & ECF_PURE)
+      if (!(flags & ECF_POINTER_NO_ESCAPE))
{
- local->calls_read_all = true;
- if (local->pure_const_not_set_in_source
- && local->pure_const_state == IPA_CONST)
- local->pure_const_state = IPA_PURE;
+ /* If this is a direct call to an external function, mark all of
+ the parameter and return types. */
+ for (operand = operandList;
+ operand != NULL_TREE;
+ operand = TREE_CHAIN (operand))
+ {
+ mark_interesting_type (TREE_TYPE (TREE_VALUE (operand)),
+ EXPOSED_PARAMETER);
+ }
+
+ if (callee_t)
+ mark_interesting_type (TREE_TYPE (TREE_TYPE (callee_t)),
+ EXPOSED_PARAMETER);
}
- else
+
+ if (local)
{
- local->calls_read_all = true;
- local->calls_write_all = true;
- if (local->pure_const_not_set_in_source)
- local->pure_const_state = IPA_NEITHER;
+ if (flags & ECF_PURE)
+ {
+ local->calls_read_all = true;
+ if (local->pure_const_not_set_in_source
+ && local->pure_const_state == IPA_CONST)
+ local->pure_const_state = IPA_PURE;
+ }
+ else
+ {
+ local->calls_read_all = true;
+ local->calls_write_all = true;
+ if (local->pure_const_not_set_in_source)
+ local->pure_const_state = IPA_NEITHER;
+ }
}
}
else
{
/* We have the code and we will scan it for the effects. */
- if (flags & ECF_PURE)
+ if (local && (flags & ECF_PURE))
{
/* Since we have the code for the function, we do not need to
set calls_read_all, we can determine the precise reads
@@ -873,6 +1564,33 @@ process_call_for_static_vars(ipa_local_s
local->pure_const_state = IPA_PURE;
}
}
+
+ return (flags & ECF_MALLOC);
+}
+
+/* OP0 is the one we *know* is a pointer type.
+ OP1 may be a pointer type. */
+static bool
+okay_pointer_operation (enum tree_code code, tree op0, tree op1)
+{
+ tree op0type = TREE_TYPE (op0);
+ tree op1type = TREE_TYPE (op1);
+ if (POINTER_TYPE_P (op1type))
+ return false;
+ switch (code)
+ {
+ case MULT_EXPR:
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ /* TODO: Handle multiples of op0 size as well */
+ if (operand_equal_p (size_in_bytes (op0type), op1, 0))
+ return true;
+ /* fallthrough */
+
+ default:
+ return false;
+ }
+ return false;
}
/* FIXME -- PROFILE-RESTRUCTURE: Change to walk by explicitly walking
@@ -897,7 +1615,7 @@ scan_for_static_refs (tree *tp,
ipa_local_static_vars_info_t local = NULL;
if (fn)
local = fn->static_vars_info->local;
-
+
switch (TREE_CODE (t))
{
case VAR_DECL:
@@ -909,20 +1627,66 @@ scan_for_static_refs (tree *tp,
case MODIFY_EXPR:
{
/* First look on the lhs and see what variable is stored to */
+ tree lhs = TREE_OPERAND (t, 0);
tree rhs = TREE_OPERAND (t, 1);
- check_lhs_var (local, TREE_OPERAND (t, 0));
+
+ check_lhs_var (local, lhs);
+ check_cast (TREE_TYPE (lhs), rhs);
+
+ /* For the purposes of figuring out what the cast affects */
/* Next check the operands on the rhs to see if they are ok. */
switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
{
- case tcc_binary:
- check_rhs_var (local, TREE_OPERAND (rhs, 0));
- check_rhs_var (local, TREE_OPERAND (rhs, 1));
+ case tcc_binary:
+ {
+ tree op0 = TREE_OPERAND (rhs, 0);
+ tree op1 = TREE_OPERAND (rhs, 1);
+
+ /* If this is pointer arithmetic of any bad sort, then
+ we need to mark the types as bad. For binary
+ operations, no binary operator we currently support
+ is always "safe" in regard to what it would do to
+ pointers for purposes of determining which types
+ escape, except operations of the size of the type.
+ It is possible that min and max under the right set
+ of circumstances and if the moon is in the correct
+ place could be safe, but it is hard to see how this
+ is worth the effort. */
+
+ if (POINTER_TYPE_P (TREE_TYPE (op0))
+ && !okay_pointer_operation (TREE_CODE (rhs), op0, op1))
+ mark_interesting_type (TREE_TYPE (op0), FULL_ESCAPE);
+ if (POINTER_TYPE_P (TREE_TYPE (op1))
+ && !okay_pointer_operation (TREE_CODE (rhs), op1, op0))
+ mark_interesting_type (TREE_TYPE (op1), FULL_ESCAPE);
+
+ look_for_casts (lhs, op0);
+ look_for_casts (lhs, op1);
+ check_rhs_var (local, op0);
+ check_rhs_var (local, op1);
+ }
break;
case tcc_unary:
- check_rhs_var (local, TREE_OPERAND (rhs, 0));
+ {
+ tree op0 = TREE_OPERAND (rhs, 0);
+ /* For unary operations, if the operation is NEGATE or
+ ABS on a pointer, this is also considered pointer
+ arithmetic and thus, bad for business. */
+	    if ((TREE_CODE (rhs) == NEGATE_EXPR
+		 || TREE_CODE (rhs) == ABS_EXPR)
+		&& POINTER_TYPE_P (TREE_TYPE (op0)))
+ {
+ mark_interesting_type (TREE_TYPE (op0), FULL_ESCAPE);
+ }
+ check_rhs_var (local, op0);
+ look_for_casts (lhs, op0);
+ look_for_casts (lhs, rhs);
+ }
+
break;
case tcc_reference:
+ look_for_casts (lhs, rhs);
check_rhs_var (local, rhs);
break;
case tcc_declaration:
@@ -932,10 +1696,15 @@ scan_for_static_refs (tree *tp,
switch (TREE_CODE (rhs))
{
case ADDR_EXPR:
+ look_for_casts (lhs, TREE_OPERAND (rhs, 0));
check_rhs_var (local, rhs);
break;
case CALL_EXPR:
- process_call_for_static_vars (local, rhs);
+	      /* If this is a call to malloc, squirrel away the
+		 result so we do not mark the resulting cast as being
+		 bad. */
+ if (check_call (local, rhs))
+ bitmap_set_bit (results_of_malloc, DECL_UID (lhs));
break;
default:
break;
@@ -967,7 +1736,7 @@ scan_for_static_refs (tree *tp,
break;
case CALL_EXPR:
- process_call_for_static_vars (local, t);
+ check_call (local, t);
*walk_subtrees = 0;
break;
@@ -982,6 +1751,7 @@ scan_for_static_refs (tree *tp,
return NULL;
}
+
/* Lookup the tree node for the static variable that has UID. */
static tree
get_static_decl_by_uid (int index)
@@ -1047,7 +1817,7 @@ propagate_bits (struct cgraph_node *x)
if (y_global->statics_read_by_decl_uid
== all_module_statics)
{
- BITMAP_XFREE(x_global->statics_read_by_decl_uid);
+ BITMAP_XFREE (x_global->statics_read_by_decl_uid);
x_global->statics_read_by_decl_uid
= all_module_statics;
}
@@ -1064,7 +1834,7 @@ propagate_bits (struct cgraph_node *x)
if (y_global->statics_written_by_decl_uid
== all_module_statics)
{
- BITMAP_XFREE(x_global->statics_written_by_decl_uid);
+ BITMAP_XFREE (x_global->statics_written_by_decl_uid);
x_global->statics_written_by_decl_uid
= all_module_statics;
}
@@ -1145,7 +1915,7 @@ merge_callee_local_info (struct cgraph_n
/* The init routine for analyzing global static variable usage. See
comments at top for description. */
-
+int cant_touch = 0;
static void
ipa_init (void)
{
@@ -1160,6 +1930,14 @@ ipa_init (void)
module_statics_escape = BITMAP_ALLOC (&ipa_obstack);
module_statics_written = BITMAP_ALLOC (&ipa_obstack);
all_module_statics = BITMAP_ALLOC (&ipa_obstack);
+ global_types_seen = BITMAP_ALLOC (&ipa_obstack);
+ global_types_exposed_parameter = BITMAP_ALLOC (&ipa_obstack);
+ global_types_full_escape = BITMAP_ALLOC (&ipa_obstack);
+ results_of_malloc = BITMAP_ALLOC (&ipa_obstack);
+ cant_touch = 1;
+ uid_to_type = splay_tree_new (splay_tree_compare_ints, 0, 0);
+ uid_to_subtype_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
+ uid_to_addressof_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
/* There are some shared nodes, in particular the initializers on
static declarations. We do not need to scan them more than once
@@ -1188,13 +1966,20 @@ analyze_variable (struct cgraph_varpool_
break;
case FINISHED:
-/* fprintf(stderr, */
-/* "AV analyze_variable called after execute for variable %s\n" , */
-/* lang_hooks.decl_printable_name (global, 2)); */
- /*abort ();*/
+ fprintf(stderr,
+ "AV analyze_variable called after execute for variable %s\n" ,
+ lang_hooks.decl_printable_name (global, 2));
+ abort ();
break;
}
+  /* If this variable has exposure beyond the compilation unit, add
+     its type to the global types. */
+ if (vnode->externally_visible)
+ mark_interesting_type (TREE_TYPE (global), FULL_ESCAPE);
+ else
+ mark_type_seen (TREE_TYPE (global));
+
if (TREE_CODE (global) == VAR_DECL)
{
if (DECL_INITIAL (global))
@@ -1238,6 +2023,11 @@ analyze_function (struct cgraph_node *fn
/* fprintf(stderr, "cfg=%x\n", */
/* DECL_STRUCT_FUNCTION (fn->decl) -> cfg); */
+ /* If this function can be called from the outside, register the
+ types as escaping. */
+ check_function_parameter_and_return_types (decl, fn->local.externally_visible);
+
+
/* Add the info to the tree's annotation. */
fn->static_vars_info = info;
var_ann->static_vars_info = info;
@@ -1311,9 +2101,13 @@ analyze_function (struct cgraph_node *fn
for (step = BLOCK_VARS (DECL_INITIAL (decl));
step;
step = TREE_CHAIN (step))
- if (DECL_INITIAL (step))
- walk_tree (&DECL_INITIAL (step), scan_for_static_refs,
- fn, visited_nodes);
+ {
+ if (DECL_INITIAL (step))
+ walk_tree (&DECL_INITIAL (step), scan_for_static_refs,
+ fn, visited_nodes);
+ mark_type_seen (TREE_TYPE (step));
+ }
+
}
/* Also look here for private statics. */
@@ -1328,15 +2122,513 @@ analyze_function (struct cgraph_node *fn
if (DECL_INITIAL (var) && TREE_STATIC (var))
walk_tree (&DECL_INITIAL (var), scan_for_static_refs,
fn, visited_nodes);
+ mark_type_seen (TREE_TYPE (var));
+ }
+ }
+}
+
+
+
+/* Convert a type UID into its tree node.  Returns the type recorded
+ in the uid_to_type splay tree for UID, or NULL if UID was never
+ registered there. */
+static tree
+type_for_uid (int uid)
+{
+ splay_tree_node result =
+ splay_tree_lookup (uid_to_type, (splay_tree_key) uid);
+
+ if (result)
+ return (tree) result->value;
+ else return NULL;
+}
+
+/* Return a bitmap with the subtypes of the type for UID.  If it
+ does not exist, return either NULL or a new bitmap depending on the
+ value of CREATE. */
+
+static bitmap
+subtype_map_for_uid (int uid, bool create)
+{
+ splay_tree_node result =
+ splay_tree_lookup (uid_to_subtype_map, (splay_tree_key) uid);
+
+ if (result)
+ return (bitmap) result->value;
+ else if (create)
+ {
+ /* Lazily allocate the per-type subtype bitmap on the IPA obstack. */
+ bitmap subtype_map = BITMAP_ALLOC (&ipa_obstack);
+ /* Cast UID to splay_tree_key for consistency with the lookup
+ above; splay_tree_key and int may differ in width. */
+ splay_tree_insert (uid_to_subtype_map,
+ (splay_tree_key) uid,
+ (splay_tree_value) subtype_map);
+ return subtype_map;
+ }
+ else return NULL;
+}
+
+/* Mark all of the supertypes and field types of TYPE as being seen.
+ Also accumulate the subtypes for each type so that
+ close_types_full_escape can mark a subtype as escaping if the
+ supertype escapes.  The been_there_done_that bitmap terminates the
+ recursion on shared or self-referential types. */
+
+static void
+close_type_seen (tree type)
+{
+ tree field;
+ int i, uid;
+ tree binfo, base_binfo;
+
+ /* See through all pointer-tos and array-ofs; the interesting type
+ is the one at the bottom of the chain. */
+ while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
+ type = TREE_TYPE (type);
+
+ /* Canonicalize on the main variant so all variants share one UID. */
+ type = TYPE_MAIN_VARIANT (type);
+ uid = TYPE_UID (type);
+
+ if (bitmap_bit_p (been_there_done_that, uid))
+ return;
+ bitmap_set_bit (been_there_done_that, uid);
+
+ /* If we are doing a language with a type hierarchy, mark all of
+ the superclasses, and record this type in each base's subtype
+ map so escapes can later be propagated back down. */
+ if (TYPE_BINFO (type))
+ for (binfo = TYPE_BINFO (type), i = 0;
+ BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
+ {
+ tree binfo_type = BINFO_TYPE (base_binfo);
+ bitmap subtype_map = subtype_map_for_uid
+ (TYPE_UID (TYPE_MAIN_VARIANT (binfo_type)), true);
+ bitmap_set_bit (subtype_map, uid);
+ if (mark_type_seen (binfo_type))
+ close_type_seen (binfo_type);
+ }
+
+ /* If the field is a struct or union type, mark all of the
+ subfields. */
+ for (field = TYPE_FIELDS (type);
+ field;
+ field = TREE_CHAIN (field))
+ {
+ tree field_type;
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
+
+ field_type = TREE_TYPE (field);
+ if (ipa_static_star_count_of_interesting_or_array_type (field_type) >= 0)
+ if (mark_type_seen (field_type))
+ close_type_seen (field_type);
+ }
+}
+
+/* A (name, sequence-number) pair that brands a type so that distinct
+ but same-named types can be told apart while detecting duplicate
+ types (see discover_unique_type). */
+struct type_brand_s
+{
+ char* name; /* Type name, or "<UNNAMED>" (see get_name_of_type). */
+ int seq; /* Disambiguating sequence number among same-named types. */
+};
+
<q>+/* Splay tree comparison function on type_brand_s structures.  Orders
+ primarily by name (strcmp order); for equal names the sequence
+ number breaks the tie.  Note the tie-break is K2->seq - K1->seq,
+ i.e. reversed relative to the name ordering. */
+
+static int
+compare_type_brand (splay_tree_key sk1, splay_tree_key sk2)
+{
+ struct type_brand_s * k1 = (struct type_brand_s *) sk1;
+ struct type_brand_s * k2 = (struct type_brand_s *) sk2;
+
+ int value = strcmp(k1->name, k2->name);
+ if (value == 0)
+ return k2->seq - k1->seq;
+ else
+ return value;
+}</q>
+
+/* Get the name of TYPE or return the string "<UNNAMED>".  The result
+ is used only as a brand for duplicate-type detection, so an exact
+ printable name is not required. */
+static char*
+get_name_of_type (tree type)
+{
+ tree name = TYPE_NAME (type);
+
+ if (!name)
+ /* Unnamed type, do what you like here. */
+ return (char*)"<UNNAMED>";
+
+ /* It will be a TYPE_DECL in the case of a typedef, otherwise an
+ IDENTIFIER_NODE. */
+ if (TREE_CODE (name) == TYPE_DECL)
+ {
+ /* Each DECL has a DECL_NAME field which contains an
+ IDENTIFIER_NODE. (Some decls, most often labels, may have
+ zero as the DECL_NAME). */
+ if (DECL_NAME (name))
+ return (char*)IDENTIFIER_POINTER (DECL_NAME (name));
+ else
+ /* Unnamed type, do what you like here. */
+ return (char*)"<UNNAMED>";
+ }
+ else if (TREE_CODE (name) == IDENTIFIER_NODE)
+ return (char*)IDENTIFIER_POINTER (name);
+ else
+ return (char*)"<UNNAMED>";
+}
+
+
+/* Use a completely lame algorithm for removing duplicate types. This
+ code should not be here except for a bad implementation of whole
+ program compilation. */
+/* Return either TYPE, if this is the first time TYPE has been seen,
+ or a compatible TYPE that has already been processed. */
+
+static tree
+discover_unique_type (tree type)
+{
+ struct type_brand_s * brand = xmalloc(sizeof(struct type_brand_s));
+ int i = 0;
+ splay_tree_node result;
+
+ /* Probe (name, 0), (name, 1), ... until we either find a compatible
+ type already registered under this name or run off the end. */
+ while (1)
+ {
+ brand->name = get_name_of_type (type);
+ brand->seq = i;
+ result = splay_tree_lookup (all_unique_types, (splay_tree_key) brand);
+ if (result)
+ {
+ tree other_type = (tree) result->value;
+ if (lang_hooks.types_compatible_p (type, other_type) == 1)
+ {
+ free (brand);
+ return other_type;
+ }
+ /* Not compatible, look for next instance with same name. */
+ }
+ else
+ {
+ /* No more instances, create new one.  BRAND becomes the splay
+ tree key here (intentionally not freed); the keys are
+ reclaimed when all_unique_types is torn down in
+ do_type_analysis.
+ NOTE(review): seq was already set to I above, so the
+ post-increment here is redundant. */
+ brand->seq = i++;
+ splay_tree_insert (all_unique_types,
+ (splay_tree_key) brand,
+ (splay_tree_value) type);
+
+ return type;
+ }
+ i++;
+ }
+}
+
+/* Take a TYPE that has been passed by value to an external function
+ and mark all of the fields that have pointer types as escaping. For
+ any of the non-pointer types that are structures or unions,
+ recurse. TYPE is never a pointer type.  Recursion is cut off by
+ the been_there_done_that bitmap. */
+
+static void
+close_type_exposed_parameter (tree type)
+{
+ tree field;
+ int uid = TYPE_UID (TYPE_MAIN_VARIANT (type));
+
+ if (bitmap_bit_p (been_there_done_that, uid))
+ return;
+ bitmap_set_bit (been_there_done_that, uid);
+
+ /* If the field is a struct or union type, mark all of the
+ subfields. */
+ for (field = TYPE_FIELDS (type);
+ field;
+ field = TREE_CHAIN (field))
+ {
+ tree field_type;
+
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
+
+ field_type = TREE_TYPE (field);
+ mark_interesting_type (field_type, EXPOSED_PARAMETER);
+
+ /* Star count zero means FIELD_TYPE is itself a record or union
+ (not a pointer to one), so its fields are exposed too. */
+ if (ipa_static_star_count_of_interesting_type (field_type) == 0)
+ close_type_exposed_parameter (field_type);
+ }
+}
+
+/* The next function handles the case where a type fully escapes.
+ This means that not only does the type itself escape,
+
+ a) the type of every field recursively escapes
+ b) the type of every subtype escapes as well as the super as well
+ as all of the pointer to types for each field.
+
+ Note that pointer to types are not marked as escaping. If the
+ pointed to type escapes, the pointer to type also escapes.
+
+ Take a TYPE that has had the address taken for an instance of it
+ and mark all of the types for its fields as having their addresses
+ taken. */
+
+static void
+close_type_full_escape (tree type)
+{
+ tree field;
+ unsigned int i;
+ int uid;
+ tree binfo, base_binfo;
+ bitmap_iterator bi;
+ bitmap subtype_map;
+
+ /* Strip off any pointer or array types. */
+ while (POINTER_TYPE_P (type) || TREE_CODE(type) == ARRAY_TYPE)
+ type = TREE_TYPE (type);
+
+ /* Canonicalize on the main variant so all variants share one UID. */
+ type = TYPE_MAIN_VARIANT (type);
+ uid = TYPE_UID (type);
+
+ if (bitmap_bit_p (been_there_done_that, uid))
+ return;
+ bitmap_set_bit (been_there_done_that, uid);
+
+ subtype_map = subtype_map_for_uid (uid, false);
+
+ /* If we are doing a language with a type hierarchy, mark all of
+ the superclasses. */
+ if (TYPE_BINFO (type))
+ for (binfo = TYPE_BINFO (type), i = 0;
+ BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
+ {
+ tree binfotype = BINFO_TYPE (base_binfo);
+ if (mark_type (binfotype, FULL_ESCAPE))
+ close_type_full_escape (binfotype);
+ }
+
+ /* Mark as escaped any types that have been down casted to
+ this type (recorded in the subtype map by close_type_seen). */
+ if (subtype_map)
+ EXECUTE_IF_SET_IN_BITMAP (subtype_map, 0, i, bi)
+ {
+ tree subtype = type_for_uid (i);
+ if (mark_type (subtype, FULL_ESCAPE))
+ close_type_full_escape (subtype);
+ }
+
+ /* If the field is a struct or union type, mark all of the
+ subfields. */
+ for (field = TYPE_FIELDS (type);
+ field;
+ field = TREE_CHAIN (field))
+ {
+ tree field_type;
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
+
+ field_type = TREE_TYPE (field);
+ if (ipa_static_star_count_of_interesting_or_array_type (field_type) >= 0)
+ if (mark_type (field_type, FULL_ESCAPE))
+ close_type_full_escape (field_type);
+ }
+}
+
+/* It is not necessary to carry around the addressof map for any
+ escaping TYPE.  Free the bitmap and remove the uid_to_addressof_map
+ entry for FROM_TYPE, if one exists. */
+
+static void
+delete_addressof_map (tree from_type)
+{
+ int uid = TYPE_UID (TYPE_MAIN_VARIANT (from_type));
+ splay_tree_node result =
+ splay_tree_lookup (uid_to_addressof_map, (splay_tree_key) uid);
+
+ if (result)
+ {
+ bitmap map = (bitmap) result->value;
+ BITMAP_XFREE (map);
+ splay_tree_remove (uid_to_addressof_map, (splay_tree_key) uid);
+ }
+}
+
+/* Transitively close the addressof bitmap for the type with UID.
+ This means that if we had a.b and b.c, a would have both b and c in
+ its map.  Returns the (possibly rebuilt) map, or NULL if UID has no
+ entry.  been_there_done_that memoizes entries already closed, so
+ shared substructures are processed only once. */
+
+static bitmap
+close_addressof (int uid)
+{
+ bitmap_iterator bi;
+ splay_tree_node result =
+ splay_tree_lookup (uid_to_addressof_map, (splay_tree_key) uid);
+ bitmap map = NULL;
+ bitmap new_map;
+ unsigned int i;
+
+ if (result)
+ map = (bitmap) result->value;
+ else
+ return NULL;
+
+ if (bitmap_bit_p (been_there_done_that, uid))
+ return map;
+ bitmap_set_bit (been_there_done_that, uid);
+
+ /* The new_map will have all of the bits for the enclosed fields and
+ will have the unique id version of the old map. */
+ new_map = BITMAP_ALLOC (&ipa_obstack);
+
+ EXECUTE_IF_SET_IN_BITMAP (map, 0, i, bi)
+ {
+ int new_uid = unique_type_id_for (i);
+ bitmap submap = close_addressof (new_uid);
+ bitmap_set_bit (new_map, new_uid);
+ if (submap)
+ bitmap_ior_into (new_map, submap);
+ }
+ /* Replace the old map with the closed one and free the original. */
+ result->value = (splay_tree_value) new_map;
+
+ BITMAP_FREE (map);
+ return new_map;
+}
+
+/* Do all of the closures to discover which types escape the
+ compilation unit.  Called from static_execute after all functions
+ and variables have been analyzed.  been_there_done_that is
+ allocated here and cleared between the separate closure passes. */
+
+static void
+do_type_analysis (void)
+{
+ unsigned int i;
+ bitmap_iterator bi;
+ splay_tree_node result;
+
+ been_there_done_that = BITMAP_ALLOC (&ipa_obstack);
+
+ /* Examine the types that we have directly seen in scanning the code
+ and add to that any contained types or superclasses. */
+
+ EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
+ {
+ tree type = type_for_uid (i);
+ /* Only look at records and unions with no pointer tos. */
+ if (ipa_static_star_count_of_interesting_or_array_type (type) == 0)
+ close_type_seen (type);
+ }
+ bitmap_clear (been_there_done_that);
+
+ /* Map the duplicate types to a single unique type. This is a hack;
+ it is not a general algorithm. */
+ uid_to_unique_type = splay_tree_new (splay_tree_compare_ints, 0, 0);
+ all_unique_types = splay_tree_new (compare_type_brand, 0, 0);
+
+ EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
+ {
+ tree unique_type = discover_unique_type (type_for_uid (i));
+ splay_tree_insert (uid_to_unique_type,
+ (splay_tree_key) i,
+ (splay_tree_value) unique_type);
+ if (0)
+ fprintf(stderr, "dta i=%d,%d j=%d,%s\n", i,
+ TYPE_UID(type_for_uid(i)),
+ TYPE_UID(unique_type), get_name_of_type(unique_type));
+ }
+
+ /* Get rid of the temporary data structures used to find the unique
+ type.  The keys are the type_brand_s records allocated by
+ discover_unique_type; free each one before deleting the tree. */
+ result = splay_tree_min (all_unique_types);
+ while (result)
+ {
+ struct type_brand_s * b = (struct type_brand_s *) result->key;
+ splay_tree_remove (all_unique_types, result->key);
+ free (b);
+ result = splay_tree_min (all_unique_types);
+ }
+ splay_tree_delete (all_unique_types);
+ all_unique_types = NULL;
+
+ /* Examine all of the types passed by value and mark any enclosed
+ pointer types as escaping. */
+
+ EXECUTE_IF_SET_IN_BITMAP (global_types_exposed_parameter, 0, i, bi)
+ {
+ close_type_exposed_parameter (type_for_uid (i));
+ }
+ bitmap_clear (been_there_done_that);
+
+ /* Close the types for escape. If something escapes, then any
+ enclosed types escape as well as any subtypes. */
+
+ EXECUTE_IF_SET_IN_BITMAP (global_types_full_escape, 0, i, bi)
+ {
+ tree type = type_for_uid (i);
+ close_type_full_escape (type);
+ }
+ bitmap_clear (been_there_done_that);
+
+ /* Walk the addressof map, keeping (and closing) only the maps of
+ non-escaping unique-designate types. */
+ result = splay_tree_min (uid_to_addressof_map);
+ while (result)
+ {
+ int uid = result->key;
+ tree type = type_for_uid (uid);
+ if (bitmap_bit_p (global_types_full_escape, uid))
+ /* If the type escaped, we will never use the map, so get rid
+ of it. */
+ delete_addressof_map (type);
+ else
+ {
+ if (unique_type_id_p (uid))
+ /* Close the addressof map, i.e. copy all of the
+ transitive substructures up to this level. */
+ close_addressof (uid);
+ else
+ /* This type is not the unique designate, so get rid of
+ it. */
+ delete_addressof_map (type);
+ }
+ result = splay_tree_successor (uid_to_addressof_map, uid);
+ }
+
+ /* If a type is set in global_types_full_escape, make sure that the
+ unique type is also set in that map. */
+ EXECUTE_IF_SET_IN_BITMAP (global_types_full_escape, 0, i, bi)
+ {
+ unsigned int j = unique_type_id_for (i);
+ if (i != j)
+ {
+ bitmap_set_bit(global_types_full_escape, j);
+ bitmap_clear_bit(global_types_full_escape, i);
+ }
+ }
+
+ /* Debug dump of the final classification, normally compiled out. */
+ if (0)
+ {
+ EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
+ {
+ /* The pointer types are in the global_types_full_escape bitmap
+ but not in the backwards map. */
+ tree type = type_for_uid (i);
+ fprintf(stderr, "type %d ", i);
+ print_generic_expr (stderr, type, 0);
+ if (bitmap_bit_p (global_types_full_escape, i))
+ fprintf(stderr, " escaped\n");
+ else if (unique_type_id_p (i))
+ fprintf(stderr, " contained\n");
+ else
+ fprintf(stderr, " replaced\n");
 }
 }
+
+ /* Get rid of the subtype map. */
+ result = splay_tree_min (uid_to_subtype_map);
+ while (result)
+ {
+ bitmap b = (bitmap)result->value;
+ BITMAP_XFREE(b);
+ splay_tree_remove (uid_to_subtype_map, result->key);
+ result = splay_tree_min (uid_to_subtype_map);
+ }
+ splay_tree_delete (uid_to_subtype_map);
+ uid_to_subtype_map = NULL;
+
+ BITMAP_XFREE (global_types_exposed_parameter);
+ BITMAP_XFREE (been_there_done_that);
+ BITMAP_XFREE (results_of_malloc);
+
+ cant_touch = 0;
}
+
/* Produce the global information by performing a transitive closure
on the local information that was produced by ipa_analyze_function
and ipa_analyze_variable. */
-
static void
static_execute (void)
{
@@ -1527,16 +2819,6 @@ static_execute (void)
break;
}
-/* if (l->pure_const_state == IPA_PURE) */
-/* { */
-/* fprintf (stderr, " before %s(%d)=%d\n", cgraph_node_name (node), */
-/* node->uid, l->pure_const_state); */
-/* l->pure_const_state = IPA_NEITHER; */
-
-
-/* } */
-
-
/* Any variables that are not in all_module_statics are
removed from the local maps. This will include all of the
variables that were found to escape in the function
@@ -1751,6 +3033,8 @@ static_execute (void)
}
}
+ do_type_analysis ();
+
/* Cleanup. */
for (i = 0; i < order_pos; i++ )
{
@@ -1767,12 +3051,6 @@ static_execute (void)
node_g->statics_not_read_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
node_g->statics_not_written_by_decl_uid = BITMAP_ALLOC (&ipa_obstack);
- /* FIXME -- PROFILE-RESTRUCTURE: Delete next 4 assignments. */
- node_g->statics_read_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
- node_g->statics_written_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
- node_g->statics_not_read_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
- node_g->statics_not_written_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
-
if (node_g->statics_read_by_decl_uid != all_module_statics)
{
bitmap_and_compl (node_g->statics_not_read_by_decl_uid,
@@ -1785,6 +3063,12 @@ static_execute (void)
all_module_statics,
node_g->statics_written_by_decl_uid);
+ /* FIXME -- PROFILE-RESTRUCTURE: Delete next 4 assignments. */
+ node_g->statics_read_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+ node_g->statics_written_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+ node_g->statics_not_read_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+ node_g->statics_not_written_by_ann_uid = BITMAP_ALLOC (&ipa_obstack);
+
w = node;
while (w)
{
diff -r -up -x CVS ../gccSBaseline/gcc/Makefile.in gcc/Makefile.in
--- ../gccSBaseline/gcc/Makefile.in 2005-01-21 22:01:08.000000000 -0500
+++ gcc/Makefile.in 2005-01-22 15:58:07.000000000 -0500
@@ -1759,7 +1759,7 @@ gimplify.o : gimplify.c $(CONFIG_H) $(SY
gimple-low.o : gimple-low.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) errors.h \
diagnostic.h $(TREE_GIMPLE_H) tree-inline.h varray.h langhooks.h \
langhooks-def.h $(TREE_FLOW_H) $(TIMEVAR_H) $(TM_H) coretypes.h except.h \
- $(FLAGS_H) $(RTL_H) function.h $(EXPR_H) tree-pass.h
+ $(FLAGS_H) $(RTL_H) function.h $(EXPR_H) tree-pass.h pointer-set.h
tree-browser.o : tree-browser.c tree-browser.def $(CONFIG_H) $(SYSTEM_H) \
$(TREE_H) errors.h tree-inline.h diagnostic.h $(HASHTAB_H) \
$(TM_H) coretypes.h
@@ -1937,7 +1937,7 @@ ipa_prop.o : ipa_prop.c $(CONFIG_H) $(SY
ipa-static-vars-anal.o : ipa-static-vars-anal.c $(CONFIG_H) $(SYSTEM_H) \
coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) tree-inline.h langhooks.h \
pointer-set.h $(GGC_H) $(IPA_STATIC_H) $(C_COMMON_H) $(TREE_GIMPLE_H) \
- $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h
+ $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h $(DIAGNOSTIC_H)
ipa.o : ipa.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(CGRAPH_H)
ipa-inline.o : ipa-inline.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
langhooks.h tree-inline.h $(FLAGS_H) $(CGRAPH_H) intl.h $(TREE_FLOW_H) \
@@ -2138,7 +2138,7 @@ reorg.o : reorg.c $(CONFIG_H) $(SYSTEM_H
alias.o : alias.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(FLAGS_H) \
hard-reg-set.h $(BASIC_BLOCK_H) $(REGS_H) toplev.h output.h $(ALIAS_H) $(EMIT_RTL_H) \
$(GGC_H) function.h cselib.h $(TREE_H) $(TM_P_H) langhooks.h $(TARGET_H) \
- gt-alias.h $(TIMEVAR_H) $(CGRAPH_H)
+ gt-alias.h $(TIMEVAR_H) $(CGRAPH_H) ipa-static.h
regmove.o : regmove.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) insn-config.h \
$(RECOG_H) output.h $(REGS_H) hard-reg-set.h $(FLAGS_H) function.h \
$(EXPR_H) $(BASIC_BLOCK_H) toplev.h $(TM_P_H) except.h reload.h
diff -r -up -x CVS ../gccSBaseline/gcc/matrix-reorg.c gcc/matrix-reorg.c
--- ../gccSBaseline/gcc/matrix-reorg.c 2004-12-28 17:05:18.000000000 -0500
+++ gcc/matrix-reorg.c 2005-01-22 20:59:01.000000000 -0500
@@ -1348,7 +1348,7 @@ flatten_allocation_site (struct matrix_i
call_exp = copy_node (mi->allocation_info->call_exp);
if (edge)
cgraph_clone_edge (edge, cgraph_node (current_function_decl),
- call_exp, REG_BR_PROB_BASE);
+ call_exp, REG_BR_PROB_BASE, /* FKZ HACK */ 0);
TREE_OPERAND (call_exp, 1) = copy_node (TREE_OPERAND (call_exp, 1));
TREE_VALUE (TREE_OPERAND (call_exp, 1)) = new_var;
diff -r -up -x CVS ../gccSBaseline/gcc/struct-reorg.c gcc/struct-reorg.c
--- ../gccSBaseline/gcc/struct-reorg.c 2005-01-21 22:02:44.000000000 -0500
+++ gcc/struct-reorg.c 2005-01-22 20:53:42.000000000 -0500
@@ -2199,7 +2199,7 @@ add_field_mallocs (tree cur_lhs,
c_node = cgraph_node (cur_malloc->context);
c_node2 = cgraph_node (malloc_fn_decl);
- cgraph_create_edge (c_node, c_node2, call_expr);
+ cgraph_create_edge (c_node, c_node2, call_expr, /*FKZ HACK*/0, 0);
add_field_mallocs (tmp_var3, field_type, struct_data, new_mallocs_list,
cur_malloc, malloc_fn_decl);
@@ -2271,7 +2271,7 @@ create_cascading_mallocs (struct malloc_
c_node = cgraph_node (cur_malloc->context);
c_node2 = cgraph_node (malloc_fn_decl);
- cgraph_create_edge (c_node, c_node2, call_expr);
+ cgraph_create_edge (c_node, c_node2, call_expr, /*FKZ HACK*/ 0, 0);
add_field_mallocs (cur_var->new_vars->data, new_struct_type, struct_data,
new_mallocs_list, cur_malloc, malloc_fn_decl);
@@ -2468,7 +2468,7 @@ create_new_mallocs (struct struct_list *
c_node = cgraph_node (cur_malloc->context);
c_node_2 = cgraph_node (malloc_fn_decl);
cgraph_create_edge (c_node, c_node_2,
- call_expr);
+ call_expr/*FKZ HACK*/, 0, 0);
}
else
{
diff -r -up -x CVS ../gccSBaseline/gcc/tree.c gcc/tree.c
--- ../gccSBaseline/gcc/tree.c 2005-01-21 22:02:55.000000000 -0500
+++ gcc/tree.c 2005-01-22 15:59:32.000000000 -0500
@@ -2674,6 +2674,7 @@ build4_stat (enum tree_code code, tree t
return t;
}
+
/* Backup definition for non-gcc build compilers. */
tree
diff -r -up -x CVS ../gccSBaseline/gcc/tree.def gcc/tree.def
--- ../gccSBaseline/gcc/tree.def 2005-01-21 22:02:55.000000000 -0500
+++ gcc/tree.def 2005-01-22 15:59:32.000000000 -0500
@@ -921,6 +921,13 @@ DEFTREECODE (WITH_SIZE_EXPR, "with_size_
generated by the builtin targetm.vectorize.mask_for_load_builtin_decl. */
DEFTREECODE (REALIGN_LOAD_EXPR, "realign_load", tcc_expression, 3)
+/* Array-like memory addressing for pointers.  Operands are SYMBOL
+ (a static or global variable) and INDEX (a register).  The
+ corresponding address is SYMBOL + (INDEX * TYPE_SIZE_UNIT
+ (TREE_TYPE (SYMBOL))). */
+
+DEFTREECODE (MEM_REF, "mem_ref", tcc_reference, 2)
+
/*
Local variables:
mode:c
diff -r -up -x CVS ../gccSBaseline/gcc/tree-dfa.c gcc/tree-dfa.c
--- ../gccSBaseline/gcc/tree-dfa.c 2005-01-21 22:02:50.000000000 -0500
+++ gcc/tree-dfa.c 2005-01-22 15:59:22.000000000 -0500
@@ -450,7 +450,6 @@ create_stmt_ann (tree t)
return ann;
}
-
/* Create a new annotation for a tree T. */
tree_ann_t
diff -r -up -x CVS ../gccSBaseline/gcc/tree-eh.c gcc/tree-eh.c
--- ../gccSBaseline/gcc/tree-eh.c 2005-01-21 22:02:50.000000000 -0500
+++ gcc/tree-eh.c 2005-01-22 15:59:22.000000000 -0500
@@ -1833,6 +1833,7 @@ tree_could_trap_p (tree expr)
return !in_array_bounds_p (expr);
+ case MEM_REF:
case INDIRECT_REF:
case ALIGN_INDIRECT_REF:
case MISALIGNED_INDIRECT_REF:
diff -r -up -x CVS ../gccSBaseline/gcc/tree-flow.h gcc/tree-flow.h
--- ../gccSBaseline/gcc/tree-flow.h 2005-01-21 22:02:50.000000000 -0500
+++ gcc/tree-flow.h 2005-01-22 15:59:23.000000000 -0500
@@ -705,6 +705,7 @@ bool tree_duplicate_loop_to_header_edge
unsigned int *, int);
struct loop *tree_ssa_loop_version (struct loops *, struct loop *, tree,
basic_block *);
+void set_ref_original (tree, tree);
/* In tree-ssa-loop-im.c */
/* The possibilities of statement movement. */
diff -r -up -x CVS ../gccSBaseline/gcc/tree-flow-inline.h gcc/tree-flow-inline.h
--- ../gccSBaseline/gcc/tree-flow-inline.h 2005-01-21 22:02:50.000000000 -0500
+++ gcc/tree-flow-inline.h 2005-01-22 15:59:22.000000000 -0500
@@ -66,7 +66,6 @@ get_stmt_ann (tree stmt)
return (ann) ? ann : create_stmt_ann (stmt);
}
-
/* Return the annotation type for annotation ANN. */
static inline enum tree_ann_type
ann_type (tree_ann_t ann)
diff -r -up -x CVS ../gccSBaseline/gcc/tree-gimple.c gcc/tree-gimple.c
--- ../gccSBaseline/gcc/tree-gimple.c 2005-01-21 22:02:50.000000000 -0500
+++ gcc/tree-gimple.c 2005-01-22 15:59:23.000000000 -0500
@@ -160,8 +160,8 @@ is_gimple_condexpr (tree t)
bool
is_gimple_addressable (tree t)
{
- return (is_gimple_id (t) || handled_component_p (t)
- || INDIRECT_REF_P (t));
+ return (is_gimple_id (t) || handled_component_p (t)
+ || TREE_CODE (t) == MEM_REF || INDIRECT_REF_P (t));
}
/* Return true if T is function invariant. Or rather a restricted
@@ -379,7 +379,8 @@ bool
is_gimple_min_lval (tree t)
{
return (is_gimple_id (t)
- || TREE_CODE (t) == INDIRECT_REF);
+ || TREE_CODE (t) == INDIRECT_REF
+ || TREE_CODE (t) == MEM_REF);
}
/* Return true if T is a typecast operation. */
@@ -419,27 +420,6 @@ get_call_expr_in (tree t)
return NULL_TREE;
}
-/* Given a memory reference T, will return the variable at the bottom
- of the access. Unlike get_base_address below, this will recurse
- thru INDIRECT_REFS. */
-
-tree
-get_base_var (tree t)
-{
- if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
- return t;
-
- while (!SSA_VAR_P (t)
- && (!CONSTANT_CLASS_P (t))
- && TREE_CODE (t) != LABEL_DECL
- && TREE_CODE (t) != FUNCTION_DECL
- && TREE_CODE (t) != CONST_DECL)
- {
- t = TREE_OPERAND (t, 0);
- }
- return t;
-}
-
/* Given a memory reference expression T, return its base address.
The base address of a memory reference expression is the main
object being referenced. For instance, the base address for
@@ -458,7 +438,8 @@ get_base_address (tree t)
if (SSA_VAR_P (t)
|| TREE_CODE (t) == STRING_CST
|| TREE_CODE (t) == CONSTRUCTOR
- || INDIRECT_REF_P (t))
+ || INDIRECT_REF_P (t)
+ || TREE_CODE (t) == MEM_REF)
return t;
else
return NULL_TREE;
diff -r -up -x CVS ../gccSBaseline/gcc/tree.h gcc/tree.h
--- ../gccSBaseline/gcc/tree.h 2005-01-21 22:02:56.000000000 -0500
+++ gcc/tree.h 2005-01-22 17:07:46.000000000 -0500
@@ -1172,9 +1172,9 @@ struct tree_vec GTY(())
#define TREE_OPERAND(NODE, I) TREE_OPERAND_CHECK (NODE, I)
#define TREE_COMPLEXITY(NODE) (EXPR_CHECK (NODE)->exp.complexity)
-/* In INDIRECT_REF, ALIGN_INDIRECT_REF, MISALIGNED_INDIRECT_REF. */
-#define REF_ORIGINAL(NODE) TREE_CHAIN (TREE_CHECK3 (NODE, \
- INDIRECT_REF, ALIGN_INDIRECT_REF, MISALIGNED_INDIRECT_REF))
+/* In INDIRECT_REF, ALIGN_INDIRECT_REF, MISALIGNED_INDIRECT_REF, MEM_REF. */
+#define REF_ORIGINAL(NODE) TREE_CHAIN (TREE_CHECK4 (NODE, \
+ INDIRECT_REF, ALIGN_INDIRECT_REF, MISALIGNED_INDIRECT_REF, MEM_REF))
/* In a LOOP_EXPR node. */
#define LOOP_EXPR_BODY(NODE) TREE_OPERAND_CHECK_CODE (NODE, LOOP_EXPR, 0)
@@ -1241,6 +1241,10 @@ struct tree_vec GTY(())
#define CASE_HIGH(NODE) TREE_OPERAND ((NODE), 1)
#define CASE_LABEL(NODE) TREE_OPERAND ((NODE), 2)
+/* The operands of a MEM_REF. */
+#define MEM_REF_SYMBOL(NODE) (TREE_OPERAND (MEM_REF_CHECK (NODE), 0))
+#define MEM_REF_INDEX(NODE) (TREE_OPERAND (MEM_REF_CHECK (NODE), 1))
+
/* The operands of a BIND_EXPR. */
#define BIND_EXPR_VARS(NODE) (TREE_OPERAND (BIND_EXPR_CHECK (NODE), 0))
#define BIND_EXPR_BODY(NODE) (TREE_OPERAND (BIND_EXPR_CHECK (NODE), 1))
@@ -2162,9 +2166,17 @@ struct tree_binfo GTY (())
/* Nonzero in a FUNCTION_DECL means this function should be treated
as if it were a malloc, meaning it returns a pointer that is
- not an alias. */
+ not an alias. It also means that even though it returns a void*
+ pointer, the value returned does not cause the type to escape. */
#define DECL_IS_MALLOC(NODE) (FUNCTION_DECL_CHECK (NODE)->decl.malloc_flag)
+/* Nonzero in a FUNCTION_DECL means this function should be treated as
+ if pointers passed to it effectively die, meaning it takes a
+ pointer to a void* but the cast for this pararmeter does not cause
+ the type to escape. */
+#define DECL_IS_POINTER_NO_ESCAPE(NODE) \
+ (FUNCTION_DECL_CHECK (NODE)->decl.pointer_no_escape_flag)
+
/* Nonzero in a FUNCTION_DECL means this function should be treated
as "pure" function (like const function, but may read global memory). */
#define DECL_IS_PURE(NODE) (FUNCTION_DECL_CHECK (NODE)->decl.pure_flag)
@@ -2370,7 +2382,8 @@ struct tree_decl GTY(())
unsigned possibly_inlined : 1;
unsigned preserve_flag: 1;
unsigned gimple_formal_temp : 1;
- /* 13 unused bits. */
+ unsigned pointer_no_escape_flag : 1;
+ /* 11 unused bits. */
union tree_decl_u1 {
/* In a FUNCTION_DECL for which DECL_BUILT_IN holds, this is
@@ -3679,6 +3692,9 @@ extern rtx emit_line_note (location_t);
#define ECF_ALWAYS_RETURN 512
/* Create libcall block around the call. */
#define ECF_LIBCALL_BLOCK 1024
+/* Nonzero if this is a call to a function that does not allow the
+ pointers to escape or a related function. */
+#define ECF_POINTER_NO_ESCAPE 2048
extern int flags_from_decl_or_type (tree);
extern int call_expr_flags (tree);
@@ -3799,7 +3815,7 @@ extern tree walk_tree_without_duplicates
/* In tree-dump.c */
/* Different tree dump places. When you add new tree dump places,
- extend the DUMP_FILES array in tree-dump.c. */
+ extend the DUMP_FILES array in tree-dump.c. */
enum tree_dump_index
{
TDI_none, /* No dump */
@@ -3930,7 +3946,6 @@ extern bool in_gimple_form;
extern bool thread_through_all_blocks (void);
/* In tree-gimple.c. */
-extern tree get_base_var (tree t);
extern tree get_base_address (tree t);
#endif /* GCC_TREE_H */
diff -r -up -x CVS ../gccSBaseline/gcc/tree-inline.c gcc/tree-inline.c
--- ../gccSBaseline/gcc/tree-inline.c 2005-01-21 22:02:50.000000000 -0500
+++ gcc/tree-inline.c 2005-01-22 15:59:23.000000000 -0500
@@ -720,7 +720,7 @@ copy_body_r (tree *tp, int *walk_subtree
edge = cgraph_edge (id->current_node, old_node);
if (edge)
- cgraph_clone_edge (edge, id->node, *tp, REG_BR_PROB_BASE);
+ cgraph_clone_edge (edge, id->node, *tp, REG_BR_PROB_BASE, 1);
}
}
else if (TREE_CODE (*tp) == RESX_EXPR)
@@ -1724,6 +1724,7 @@ estimate_num_insns_1 (tree *tp, int *wal
case BLOCK:
case COMPONENT_REF:
case BIT_FIELD_REF:
+ case MEM_REF:
case INDIRECT_REF:
case ARRAY_REF:
case ARRAY_RANGE_REF:
@@ -2098,7 +2099,8 @@ expand_call_inline (tree *tp, int *walk_
constant propagating arguments. In all other cases we hit a bug
(incorrect node sharing is most common reason for missing edges. */
gcc_assert (dest->needed || !flag_unit_at_a_time);
- cgraph_create_edge (id->node, dest, t)->inline_failed
+ cgraph_create_edge (id->node, dest, t, id->oic_basic_block->count,
+ id->oic_basic_block->loop_depth)->inline_failed
= N_("originally indirect function call not considered for inlining");
goto egress;
}
diff -r -up -x CVS ../gccSBaseline/gcc/tree-optimize.c gcc/tree-optimize.c
--- ../gccSBaseline/gcc/tree-optimize.c 2005-01-21 22:02:51.000000000 -0500
+++ gcc/tree-optimize.c 2005-01-22 15:59:25.000000000 -0500
@@ -402,6 +402,7 @@ init_tree_optimization_passes (void)
NEXT_PASS (pass_init_datastructures);
NEXT_PASS (pass_all_optimizations);
NEXT_PASS (pass_warn_function_return);
+ NEXT_PASS (pass_lower_memref);
NEXT_PASS (pass_mudflap_2);
NEXT_PASS (pass_free_datastructures);
NEXT_PASS (pass_expand);
@@ -411,10 +412,12 @@ init_tree_optimization_passes (void)
p = &pass_all_optimizations.sub;
NEXT_PASS (pass_referenced_vars);
NEXT_PASS (pass_maybe_create_global_var);
+ NEXT_PASS (pass_lower_memref);
NEXT_PASS (pass_build_ssa);
NEXT_PASS (pass_may_alias);
NEXT_PASS (pass_rename_ssa_copies);
NEXT_PASS (pass_early_warn_uninitialized);
+ NEXT_PASS (pass_eliminate_useless_stores);
NEXT_PASS (pass_dce);
NEXT_PASS (pass_dominator);
NEXT_PASS (pass_redundant_phi);
@@ -696,9 +699,12 @@ tree_lowering_passes (tree fn)
current_function_decl = fn;
push_cfun (DECL_STRUCT_FUNCTION (fn));
tree_register_cfg_hooks ();
+ bitmap_obstack_initialize (NULL);
execute_pass_list (all_lowering_passes, EXECUTE_HOOK, NULL, NULL);
- current_function_decl = saved_current_function_decl;
+ free_dominance_info (CDI_POST_DOMINATORS);
compact_blocks ();
+ current_function_decl = saved_current_function_decl;
+ bitmap_obstack_release (NULL);
pop_cfun ();
}
@@ -709,10 +715,14 @@ tree_early_local_passes (tree fn)
current_function_decl = fn;
push_cfun (DECL_STRUCT_FUNCTION (fn));
+ bitmap_obstack_initialize (NULL);
tree_register_cfg_hooks ();
execute_pass_list (all_early_local_passes, EXECUTE_HOOK, NULL, NULL);
- current_function_decl = saved_current_function_decl;
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
compact_blocks ();
+ current_function_decl = saved_current_function_decl;
+ bitmap_obstack_release (NULL);
pop_cfun ();
}
@@ -798,7 +808,7 @@ tree_rest_of_compilation (tree fndecl)
struct cgraph_edge *e;
node = cgraph_node (current_function_decl);
- saved_node = cgraph_clone_node (node, node->count);
+ saved_node = cgraph_clone_node (node, node->count, 1);
for (e = saved_node->callees; e; e = e->next_callee)
if (!e->inline_failed)
cgraph_clone_inlined_nodes (e, true);
diff -r -up -x CVS ../gccSBaseline/gcc/tree-pass.h gcc/tree-pass.h
--- ../gccSBaseline/gcc/tree-pass.h 2005-01-21 22:02:51.000000000 -0500
+++ gcc/tree-pass.h 2005-01-22 15:59:25.000000000 -0500
@@ -193,8 +193,9 @@ extern struct tree_opt_pass pass_expand;
extern struct tree_opt_pass pass_rest_of_compilation;
extern struct tree_opt_pass pass_fre;
extern struct tree_opt_pass pass_linear_transform;
+extern struct tree_opt_pass pass_eliminate_useless_stores;
extern struct tree_opt_pass pass_maybe_create_global_var;
-
+extern struct tree_opt_pass pass_lower_memref;
extern struct tree_opt_pass pass_ipa_inline;
extern struct tree_opt_pass pass_ipa_static;
diff -r -up -x CVS ../gccSBaseline/gcc/tree-pretty-print.c gcc/tree-pretty-print.c
--- ../gccSBaseline/gcc/tree-pretty-print.c 2005-01-21 22:02:51.000000000 -0500
+++ gcc/tree-pretty-print.c 2005-01-22 15:59:26.000000000 -0500
@@ -444,6 +444,15 @@ dump_generic_node (pretty_printer *buffe
NIY;
break;
+ case MEM_REF:
+ {
+ dump_generic_node (buffer, MEM_REF_SYMBOL (node), spc, flags, false);
+ pp_string (buffer, "[");
+ dump_generic_node (buffer, MEM_REF_INDEX (node), spc, flags, false);
+ pp_string (buffer, "]");
+ }
+ break;
+
case ARRAY_TYPE:
{
tree tmp;
diff -r -up -x CVS ../gccSBaseline/gcc/tree-ssa-alias.c gcc/tree-ssa-alias.c
--- ../gccSBaseline/gcc/tree-ssa-alias.c 2005-01-21 22:02:51.000000000 -0500
+++ gcc/tree-ssa-alias.c 2005-01-22 15:59:27.000000000 -0500
@@ -42,7 +42,7 @@ Boston, MA 02111-1307, USA. */
#include "tree-pass.h"
#include "convert.h"
#include "params.h"
-
+#include "ipa-static.h"
/* Structure to map a variable to its alias set and keep track of the
virtual operands that will be needed to represent it. */
@@ -124,6 +124,8 @@ struct alias_stats_d
unsigned int simple_resolved;
unsigned int tbaa_queries;
unsigned int tbaa_resolved;
+ unsigned int structnoaddress_queries;
+ unsigned int structnoaddress_resolved;
};
@@ -575,6 +577,9 @@ find_ptr_dereference (tree *tp, int *wal
if (INDIRECT_REF_P (*tp)
&& TREE_OPERAND (*tp, 0) == ptr)
return *tp;
+ else if (TREE_CODE (*tp) == MEM_REF
+ && MEM_REF_SYMBOL (*tp) == ptr)
+ return *tp;
return NULL_TREE;
}
@@ -1638,6 +1643,63 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem
return false;
}
+  /* If var is a record or union type, ptr cannot point into var
+     unless there is some explicit address operation in the
+     program that can reference a field of the ptr's dereferenced
+     type.  This also assumes that the types of both var and ptr are
+     contained within the compilation unit, and that there is no fancy
+     addressing arithmetic associated with any of the types
+     involved.  */
+
+  {
+    tree ptr_type = TREE_TYPE (ptr);
+
+    /* The star counts are -1 if the type at the end of the pointer_to
+       chain is not a record or union type. */
+    if (ipa_static_star_count_of_interesting_type (var) >= 0)
+      {
+	int ptr_star_count = 0;
+	/* ipa_static_star_count_of_interesting_type is a little too
+	   restrictive for the pointer type, need to allow pointers to
+	   primitive types as long as those types cannot be pointers
+	   to everything.  */
+	/* Strip the *'s off.  */
+	while (POINTER_TYPE_P (ptr_type))
+	  {
+	    ptr_type = TREE_TYPE (ptr_type);
+	    ptr_star_count++;
+	  }
+
+	/* There does not appear to be a better test to see if the
+	   pointer type was one of the pointer to everything
+	   types.  */
+	if (TREE_CODE (ptr_type) == CHAR_TYPE
+	    || TREE_CODE (ptr_type) == VOID_TYPE)
+	  ptr_star_count = -1;
+
+	if (ptr_star_count > 0)
+	  {
+	    alias_stats.structnoaddress_queries++;
+	    if (ipa_static_address_not_taken_of_field (TREE_TYPE (var),
+						       TREE_TYPE (ptr_type)))
+	      {
+		alias_stats.structnoaddress_resolved++;
+		alias_stats.alias_noalias++;
+		return false;
+	      }
+	  }
+	else if (ptr_star_count == 0)
+	  {
+	    /* If PTR was not really a pointer at all, it cannot
+	       alias anything.  */
+	    alias_stats.structnoaddress_queries++;
+	    alias_stats.structnoaddress_resolved++;
+	    alias_stats.alias_noalias++;
+	    return false;
+	  }
+      }
+  }
+
alias_stats.alias_mayalias++;
return true;
}
@@ -2236,6 +2298,10 @@ dump_alias_stats (FILE *file)
alias_stats.tbaa_queries);
fprintf (file, "Total TBAA resolved:\t%u\n",
alias_stats.tbaa_resolved);
+ fprintf (file, "Total non-addressable structure type queries:\t%u\n",
+ alias_stats.structnoaddress_queries);
+ fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
+ alias_stats.structnoaddress_resolved);
}
diff -r -up -x CVS ../gccSBaseline/gcc/tree-ssa-loop-im.c gcc/tree-ssa-loop-im.c
--- ../gccSBaseline/gcc/tree-ssa-loop-im.c 2005-01-21 22:02:52.000000000 -0500
+++ gcc/tree-ssa-loop-im.c 2005-01-22 15:59:29.000000000 -0500
@@ -174,6 +174,18 @@ for_each_index (tree *addr_p, bool (*cbc
case RESULT_DECL:
return true;
+    case MEM_REF:
+      /* Visit the symbol and then the index; bail out on callback failure.  */
+      idx = &MEM_REF_SYMBOL (*addr_p);
+      if (*idx
+	  && !cbck (*addr_p, idx, data))
+	return false;
+      idx = &MEM_REF_INDEX (*addr_p);
+      if (*idx
+	  && !cbck (*addr_p, idx, data))
+	return false;
+      return true;
+
default:
gcc_unreachable ();
}
diff -r -up -x CVS ../gccSBaseline/gcc/tree-ssa-operands.c gcc/tree-ssa-operands.c
--- ../gccSBaseline/gcc/tree-ssa-operands.c 2005-01-21 22:02:53.000000000 -0500
+++ gcc/tree-ssa-operands.c 2005-01-22 15:59:29.000000000 -0500
@@ -141,6 +141,7 @@ static void note_addressable (tree, stmt
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
+static void get_mem_ref_operands (tree, tree *, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
@@ -912,7 +913,9 @@ build_ssa_operands (tree stmt, stmt_ann_
if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
lhs = TREE_OPERAND (lhs, 0);
- if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
+ if (TREE_CODE (lhs) != ARRAY_REF
+ && TREE_CODE (lhs) != MEM_REF
+ && TREE_CODE (lhs) != ARRAY_RANGE_REF
&& TREE_CODE (lhs) != COMPONENT_REF
&& TREE_CODE (lhs) != BIT_FIELD_REF
&& TREE_CODE (lhs) != REALPART_EXPR
@@ -1092,6 +1095,10 @@ get_expr_operands (tree stmt, tree *expr
get_indirect_ref_operands (stmt, expr, flags);
return;
+ case MEM_REF:
+ get_mem_ref_operands (stmt, expr_p, flags);
+ return;
+
case ARRAY_REF:
case ARRAY_RANGE_REF:
/* Treat array references as references to the virtual variable
@@ -1167,6 +1174,7 @@ get_expr_operands (tree stmt, tree *expr
if (TREE_CODE (op) == WITH_SIZE_EXPR)
op = TREE_OPERAND (expr, 0);
if (TREE_CODE (op) == ARRAY_REF
+ || TREE_CODE (op) == MEM_REF
|| TREE_CODE (op) == ARRAY_RANGE_REF
|| TREE_CODE (op) == COMPONENT_REF
|| TREE_CODE (op) == REALPART_EXPR
@@ -1374,7 +1382,7 @@ get_indirect_ref_operands (tree stmt, tr
/* Stores into INDIRECT_REF operands are never killing definitions. */
flags &= ~opf_kill_def;
-
+
if (SSA_VAR_P (ptr))
{
struct ptr_info_def *pi = NULL;
@@ -1453,6 +1461,65 @@ get_indirect_ref_operands (tree stmt, tr
get_expr_operands (stmt, pptr, opf_none);
}
+/* A subroutine of get_expr_operands to handle MEM_REF: record its real and virtual operands.  */
+
+static void
+get_mem_ref_operands (tree stmt, tree *expr_p, int flags)
+{
+  stmt_ann_t s_ann = stmt_ann (stmt);
+  tree expr = *expr_p;
+  tree ptr;
+
+  /* First record the real operands of the index expression.  */
+  get_expr_operands (stmt, &MEM_REF_INDEX (expr), opf_none);
+
+  ptr = MEM_REF_SYMBOL (expr);
+
+  if (SSA_VAR_P (ptr))
+    {
+      struct ptr_info_def *pi = NULL;
+
+      /* If PTR has flow-sensitive points-to information, use it.  */
+      if (TREE_CODE (ptr) == SSA_NAME
+	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
+	  && pi->name_mem_tag)
+	{
+	  /* PTR has its own name memory tag; add it as a virtual operand.  */
+	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
+	}
+      else
+	{
+	  /* If PTR is not an SSA_NAME or it doesn't have a name
+	     tag, use its type memory tag.  */
+	  var_ann_t v_ann;
+
+	  /* If we are emitting debugging dumps, display a warning if
+	     PTR is an SSA_NAME with no flow-sensitive alias
+	     information.  That means that we may need to compute
+	     aliasing again.  */
+	  if (dump_file
+	      && TREE_CODE (ptr) == SSA_NAME
+	      && pi == NULL)
+	    {
+	      fprintf (dump_file,
+		       "NOTE: no flow-sensitive alias info for ");
+	      print_generic_expr (dump_file, ptr, dump_flags);
+	      fprintf (dump_file, " in ");
+	      print_generic_stmt (dump_file, stmt, dump_flags);
+	    }
+
+	  if (TREE_CODE (ptr) == SSA_NAME)
+	    ptr = SSA_NAME_VAR (ptr);
+	  v_ann = var_ann (ptr);
+	  if (v_ann->type_mem_tag)
+	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
+	}
+    }
+  /* Record the base pointer itself as a plain use operand.  */
+  get_expr_operands (stmt, &MEM_REF_SYMBOL (expr), opf_none);
+
+}
+
/* A subroutine of get_expr_operands to handle CALL_EXPR. */
static void
diff -r -up -x CVS ../gccSBaseline/gcc/tree-ssa-pre.c gcc/tree-ssa-pre.c
--- ../gccSBaseline/gcc/tree-ssa-pre.c 2005-01-21 22:02:53.000000000 -0500
+++ gcc/tree-ssa-pre.c 2005-01-22 15:59:30.000000000 -0500
@@ -2175,3 +2175,132 @@ struct tree_opt_pass pass_fre =
TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
+
+/* Return true if T is a MODIFY_EXPR copying one SSA name to another.  */
+
+static bool
+is_copy_stmt (tree t)
+{
+
+  if (TREE_CODE (t) != MODIFY_EXPR)
+    return false;
+  if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
+      && TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME)
+    return true;
+  return false;
+}
+
+/* Starting from START, walk SSA copy statements until reaching one with
+   a VUSE; return that statement, or NULL if a non-copy is hit first.  */
+
+static tree
+follow_copies_till_vuse (tree start)
+{
+  if (NUM_VUSES (VUSE_OPS (stmt_ann (start))) != 0)
+    return start;
+  if (is_copy_stmt (start))
+    return follow_copies_till_vuse (SSA_NAME_DEF_STMT (TREE_OPERAND (start, 1)));
+  return NULL;
+}
+
+/* Gate and execute functions for the eliminate-useless-stores pass.
+   The goal here is to recognize the pattern *x = ... *x, and eliminate the
+   store because the value hasn't changed.  Store copy/const prop won't
+   do this because making *more* loads (i.e. propagating *x) is not a win, so it
+   ignores them.
+   This pass is currently geared completely towards static variable store
+   elimination.
+ */
+
+static void
+do_eustores (void)
+{
+  basic_block bb;
+  /* For each basic block
+     For each statement (STMT) in the block
+     if STMT is a store of the pattern *x = y
+     follow the chain of definitions for y, until we hit a non-copy
+     statement or a statement with a vuse.
+     if the statement we arrive at is a vuse of the operand we killed,
+     accessed through the same memory operation, then we have a
+     useless store (because it is *x = ... = *x).
+  */
+
+  FOR_EACH_BB (bb)
+    {
+      block_stmt_iterator bsi;
+      for (bsi = bsi_start (bb);
+	   !bsi_end_p (bsi);)
+	{
+	  tree stmt = bsi_stmt (bsi);
+	  stmt_ann_t ann = stmt_ann (stmt);
+	  tree startat;
+	  tree kill;
+	  tree found;
+
+	  if (NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) != 1
+	      || TREE_CODE (stmt) != MODIFY_EXPR
+	      || TREE_CODE (TREE_OPERAND (stmt, 1)) != SSA_NAME)
+	    {
+	      bsi_next (&bsi);
+	      continue;
+	    }
+	  kill = V_MUST_DEF_KILL (V_MUST_DEF_OPS (ann), 0);
+	  startat = TREE_OPERAND (stmt, 1);
+	  startat = SSA_NAME_DEF_STMT (startat);
+	  found = follow_copies_till_vuse (startat);
+	  if (found)
+	    {
+	      stmt_ann_t foundann = stmt_ann (found);
+	      /* FOUND must be a load (MODIFY_EXPR) of the killed operand.  */
+	      if (TREE_CODE (found) != MODIFY_EXPR
+		  || NUM_VUSES (VUSE_OPS (foundann)) != 1
+		  || VUSE_OP (VUSE_OPS (foundann), 0) != kill
+		  || !operand_equal_p (TREE_OPERAND (found, 1), TREE_OPERAND (stmt, 0), 0))
+		{
+		  bsi_next (&bsi);
+		  continue;
+		}
+	      if (dump_file)
+		{
+		  fprintf (dump_file, "Eliminating useless store ");
+		  print_generic_stmt (dump_file, stmt, 0);
+		}
+	      bitmap_set_bit (vars_to_rename,
+			      var_ann (TREE_OPERAND (stmt, 0))->uid);
+	      bsi_remove (&bsi);
+	    }
+	  else
+	    {
+	      bsi_next (&bsi);
+	      continue;
+	    }
+	}
+
+    }
+}
+
+static bool
+gate_eustores(void)
+{
+ return flag_unit_at_a_time != 0;
+}
+
+struct tree_opt_pass pass_eliminate_useless_stores =
+{
+ "eustores", /* name */
+ gate_eustores, /* gate */
+ NULL, NULL, /* IPA analysis */
+ do_eustores, /* execute */
+ NULL, NULL, /* IPA modification */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_rename_vars | TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
+};