/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt
   tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the new operand cache, then
   the new cache vector will also get the same SSA_NAME.

   That is, if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */

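/* For instance, given a hypothetical statement 'a_1 = b_2 + c_3' in which
   'a', 'b' and 'c' are GIMPLE registers, the cache holds one real DEF
   pointing at the 'a_1' slot in the statement tree and two real USEs
   pointing at the 'b_2' and 'c_3' slots.  If 'a' were aliased instead, the
   store would be represented purely in the cache as virtual operands (a
   V_MAY_DEF of 'a' together with the implied VUSE), with nothing added to
   the statement tree itself.  */
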
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none        0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def      (1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def    (1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops     (1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This is
   used to distinguish "reset the world" events from explicit
   MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the v_may_def operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the vuse operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the v_must_def operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True if the operand cache is currently active.  */
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree, tree);
static void add_call_read_ops (tree, tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);
static void build_ssa_operands (tree stmt);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;


/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}

/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *)p);
  tree e2 = *((const tree *)q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}

/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);
  if (num < 2)
    return;
  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }
  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}


/* Return true if the ssa operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}

/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */
static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (v_may_defs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (vuses) avoided by using not_read
     information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of readonly uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of readonly uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:%d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:%d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:%d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original readonly clobbers:%d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static readonly clobbers avoided:%d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}

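/* A minimal sketch of the intended lifecycle, inferred from the routines
   above: init_ssa_operands () is called once when going into SSA form,
   update_stmt_operands () rebuilds the cache for each statement that has
   been marked modified (see the references to mark_stmt_modified () below),
   and fini_ssa_operands () releases all cache memory when leaving SSA
   form.  */
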
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}

/* Make sure PTR is in the correct immediate use list.  Since uses are simply
   pointers into the stmt TREE, there is no way of telling if anyone has
   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
   The contents are different, but the pointer is still the same.  This
   routine will check to make sure PTR is in the correct list, and if it
   isn't put it in the correct list.  We cannot simply check the previous
   node because all nodes in the same stmt might have been changed.  */

static inline void
correct_use_link (use_operand_p ptr, tree stmt)
{
  use_operand_p prev;
  tree root;

  /* Fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  prev = ptr->prev;
  if (prev)
    {
      /* Find the root element, making sure we skip any safe iterators.  */
      while (prev->use != NULL || prev->stmt == NULL)
	prev = prev->prev;

      /* Get the ssa_name of the list the node is in.  */
      root = prev->stmt;
      /* If it's the right list, simply return.  */
      if (root == *(ptr->use))
	return;
    }
  /* It's in the wrong list if we reach here.  */
  delink_imm_use (ptr);
  link_imm_use (ptr, *(ptr->use));
}


/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  Virtual uses do not
   need the overhead of correct_use_link since they cannot be directly
   manipulated like a real use can be.  (They don't exist in the
   TREE_OPERAND nodes.)  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* Fold_stmt () may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}


#define FINALIZE_OPBUILD build_defs
#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
							   build_defs, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
							   build_defs, (I))
#define FINALIZE_FUNC finalize_ssa_def_ops
#define FINALIZE_ALLOC alloc_def
#define FINALIZE_FREE free_defs
#define FINALIZE_TYPE struct def_optype_d
#define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
#define FINALIZE_OPS DEF_OPS
#define FINALIZE_BASE(VAR) VAR
#define FINALIZE_BASE_TYPE tree *
#define FINALIZE_BASE_ZERO NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
#include "tree-ssa-opfinalize.h"
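
/* The block above acts as a template instantiation: each operand type
   defines the FINALIZE_* macros and then includes tree-ssa-opfinalize.h,
   which expands into the function named by FINALIZE_FUNC (here,
   finalize_ssa_def_ops).  The same pattern repeats below for uses,
   v_may_defs, vuses and v_must_defs.  */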

/* This routine will create stmt operands for STMT from the def build
   list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);
  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */

  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}

#define FINALIZE_OPBUILD build_uses
#define FINALIZE_OPBUILD_BASE(I)	(tree *)VEC_index (tree,	\
							   build_uses, (I))
#define FINALIZE_OPBUILD_ELEM(I)	(tree *)VEC_index (tree,	\
							   build_uses, (I))
#define FINALIZE_FUNC finalize_ssa_use_ops
#define FINALIZE_ALLOC alloc_use
#define FINALIZE_FREE free_uses
#define FINALIZE_TYPE struct use_optype_d
#define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
#define FINALIZE_OPS USE_OPS
#define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE correct_use_link
#define FINALIZE_BASE(VAR) VAR
#define FINALIZE_BASE_TYPE tree *
#define FINALIZE_BASE_ZERO NULL
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	\
				(PTR)->use_ptr.use = (VAL);		\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   *(VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Finalize the new use operand vector for STMT.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}

/* Finalize the new v_may_def operand vector for STMT.  */
#define FINALIZE_OPBUILD build_v_may_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_may_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
							build_v_may_defs, (I)))
#define FINALIZE_FUNC finalize_ssa_v_may_def_ops
#define FINALIZE_ALLOC alloc_maydef
#define FINALIZE_FREE free_maydefs
#define FINALIZE_TYPE struct maydef_optype_d
#define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
#define FINALIZE_OPS MAYDEF_OPS
#define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE set_virtual_use_link
#define FINALIZE_BASE_ZERO 0
#define FINALIZE_BASE(VAR) get_name_decl (VAR)
#define FINALIZE_BASE_TYPE unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	\
				(PTR)->def_var = (VAL);			\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}


#define FINALIZE_OPBUILD build_vuses
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_vuses, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
							build_vuses, (I)))
#define FINALIZE_FUNC finalize_ssa_vuse_ops
#define FINALIZE_ALLOC alloc_vuse
#define FINALIZE_FREE free_vuses
#define FINALIZE_TYPE struct vuse_optype_d
#define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
#define FINALIZE_OPS VUSE_OPS
#define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
#define FINALIZE_CORRECT_USE set_virtual_use_link
#define FINALIZE_BASE_ZERO 0
#define FINALIZE_BASE(VAR) get_name_decl (VAR)
#define FINALIZE_BASE_TYPE unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	\
				(PTR)->use_var = (VAL);			\
				(PTR)->use_ptr.use = &((PTR)->use_var);	\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"

/* Finalize the new vuse operand vector for STMT.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
	{
	  tree vuse;
	  vuse = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  VEC_ordered_remove (tree, build_vuses, vuse_index);
		  continue;
		}
	    }
	  vuse_index++;
	}
    }
  else
    /* Clear out the in_list bits.  */
    for (vuse_index = 0;
	 vuse_index < VEC_length (tree, build_vuses);
	 vuse_index++)
      {
	tree t = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (t) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (t);
	    ann->in_vuse_list = 0;
	  }
      }

  finalize_ssa_vuse_ops (stmt);
  /* The v_may_def build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the vuses build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}

/* Finalize the new v_must_def operand vector for STMT.  */

#define FINALIZE_OPBUILD build_v_must_defs
#define FINALIZE_OPBUILD_ELEM(I)	VEC_index (tree, build_v_must_defs, (I))
#define FINALIZE_OPBUILD_BASE(I)	get_name_decl (VEC_index (tree,	\
							build_v_must_defs, (I)))
#define FINALIZE_FUNC finalize_ssa_v_must_def_ops
#define FINALIZE_ALLOC alloc_mustdef
#define FINALIZE_FREE free_mustdefs
#define FINALIZE_TYPE struct mustdef_optype_d
#define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
#define FINALIZE_OPS MUSTDEF_OPS
#define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
#define FINALIZE_CORRECT_USE set_virtual_use_link
#define FINALIZE_BASE_ZERO 0
#define FINALIZE_BASE(VAR) get_name_decl (VAR)
#define FINALIZE_BASE_TYPE unsigned
#define FINALIZE_INITIALIZE(PTR, VAL, STMT)	\
				(PTR)->def_var = (VAL);			\
				(PTR)->kill_var = (VAL);		\
				(PTR)->use_ptr.use = &((PTR)->kill_var);\
				link_imm_use_stmt (&((PTR)->use_ptr),	\
						   (VAL), (STMT))
#include "tree-ssa-opfinalize.h"


static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     MUST-def, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have num > 1, you have hit a bug.  */

  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}


/* Finalize all the build vectors, filling the new operand vectors into
   the annotation of STMT.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up operands vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree)def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree)use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree)var);
}


/* Add VAR to the list of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
}


/* Parse STMT looking for operands.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF
	 if either only part of LHS is modified or if the RHS might throw,
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	if (TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}

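/* For instance, for a hypothetical assignment 'x = y' the LHS receives
   opf_is_def | opf_kill_def (a killing definition of 'x'), whereas an LHS
   that is an ARRAY_RANGE_REF or BIT_FIELD_REF only receives opf_is_def,
   since just part of the underlying object is overwritten.  */
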
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands.  */
  if (ann)
    ann->has_volatile_ops = false;

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}


/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}


/* Get the operands of statement STMT.  Note that repeated calls to
   update_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified ().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If update_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls for this statement
     will do nothing until it is marked modified again by a call to
     mark_stmt_modified ().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}

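/* A minimal usage sketch (the wrapper names come from the comments above,
   not from this file): a pass that rewrites a statement in place first
   marks it with mark_stmt_modified () and then triggers
   update_stmt_operands () before reading the operands again through the
   FOR_EACH_SSA_* iterators.  */
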

/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}


/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
   and NEW_STMT is the new load which represents a load of the values
   stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}

/* Swap the operands pointed to by EXP0 and EXP1 in statement STMT,
   keeping the cached immediate use entries consistent.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }
      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}

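/* Hypothetical example: a caller canonicalizing the commutative expression
   'b + a' into 'a + b' would invoke
   swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0), &TREE_OPERAND (rhs, 1))
   so that the cached use entries continue to point at valid operand
   slots.  */
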
/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the stmt takes its address will
	 be of interest to some passes (e.g. alias resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0),
			      &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting variable
	 references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case TYPE_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable if it has subvars, to DEFS or USES.
	   Otherwise, add the variable itself.
	   Whether it goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  {
	    add_stmt_operand (expr_p, s_ann, flags);
	  }
	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
	 representing the array.  The virtual variable for an ARRAY_REF
	 is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case ARRAY_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;
	bool none = true;
	/* This component ref becomes an access to all of the subvariables
	   it can touch, if we can determine that, but *NOT* the real one.
	   If we can't determine which fields we could touch, the recursion
	   will eventually get to a variable and add *all* of its subvars,
	   or whatever is the minimum correct subset.  */

	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
	  {
	    subvar_t svars = get_subvars_for_var (ref);
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;
		if (overlap_subvar (offset, maxsize, sv, &exact))
		  {
		    int subvar_flags = flags;
		    none = false;
		    if (!exact
			|| size != maxsize)
		      subvar_flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }
	    if (!none)
	      flags |= opf_no_vops;
	  }

	/* Even if we found subvars above we need to ensure to see
	   immediate uses for d in s.a[d].  In case of s.a having
	   a subvar we'd miss it otherwise.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			   flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	else if (code == ARRAY_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
	  }
	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
	int subflags;
	tree op;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

	op = TREE_OPERAND (expr, 0);
	if (TREE_CODE (op) == WITH_SIZE_EXPR)
	  op = TREE_OPERAND (op, 0);
	if (TREE_CODE (op) == ARRAY_RANGE_REF
	    || TREE_CODE (op) == REALPART_EXPR
	    || TREE_CODE (op) == IMAGPART_EXPR)
	  subflags = opf_is_def;
	else
	  subflags = opf_is_def | opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
	return;
      }

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_none);

	return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_RETURN_EXPR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}

/* Scan operands in the ASM_EXPR stmt STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);

	      /* Subvars are explicitly represented in this list, so
		 we don't need the original to be added to the clobber
		 ops, but the original *will* be in this list because
		 we keep the addressability of the original
		 variable up-to-date so we don't screw up the rest of
		 the backend.  */
	      if (var_can_have_subvars (var)
		  && get_subvars_for_var (var) != NULL)
		continue;

	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	break;
      }
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);
	  if (v_ann->type_mem_tag)
	    add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}

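/* For instance, for a hypothetical store '*p_1 = x':  if points-to analysis
   attached a name tag (say NMT.5) to p_1, the store becomes a V_MAY_DEF of
   NMT.5; otherwise the type tag of 'p' (say TMT.3) is used, a coarser
   approximation standing for everything 'p' could point to.  */
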
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv, &exact))
	{
	  int subvar_flags = flags;
	  if (!exact || size != maxsize)
	    subvar_flags &= ~opf_kill_def;
	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
	}
    }
}

/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}


1a24f92f 1584/* Add *VAR_P to the appropriate operand array for INFO. FLAGS is as in
6de9cd9a
DN
1585 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1586 the statement's real operands, otherwise it is added to virtual
1a24f92f 1587 operands. */
6de9cd9a
DN
1588
1589static void
e288e2f5 1590add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
6de9cd9a
DN
1591{
1592 bool is_real_op;
1593 tree var, sym;
6de9cd9a
DN
1594 var_ann_t v_ann;
1595
1596 var = *var_p;
243cdfa8 1597 gcc_assert (SSA_VAR_P (var));
6de9cd9a 1598
6de9cd9a 1599 is_real_op = is_gimple_reg (var);
243cdfa8
ZD
1600 /* If this is a real operand, the operand is either ssa name or decl.
1601 Virtual operands may only be decls. */
1602 gcc_assert (is_real_op || DECL_P (var));
6de9cd9a
DN
1603
1604 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1605 v_ann = var_ann (sym);
1606
e79b60a7
DN
1607 /* Mark statements with volatile operands. Optimizers should back
1608 off from statements having volatile operands. */
1609 if (TREE_THIS_VOLATILE (sym) && s_ann)
1610 s_ann->has_volatile_ops = true;
6de9cd9a 1611
0bca51f0
DN
1612 /* If the variable cannot be modified and this is a V_MAY_DEF change
1613 it into a VUSE. This happens when read-only variables are marked
0fa2e4df 1614     call-clobbered and/or aliased to writable variables.  We make
0d2bf6f0
RH
 1615     this change only for non-specific stores.
1616
1617 Note that if this is a specific store, i.e. associated with a
1618 modify_expr, then we can't suppress the V_DEF, lest we run into
1619 validation problems.
1620
1621 This can happen when programs cast away const, leaving us with a
1622 store to read-only memory. If the statement is actually executed
1623 at runtime, then the program is ill formed. If the statement is
1624 not executed then all is well. At the very least, we cannot ICE. */
1625 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
0bca51f0
DN
1626 {
1627 gcc_assert (!is_real_op);
0d2bf6f0 1628 flags &= ~(opf_is_def | opf_kill_def);
0bca51f0
DN
1629 }
1630
6de9cd9a
DN
1631 if (is_real_op)
1632 {
1633 /* The variable is a GIMPLE register. Add it to real operands. */
1634 if (flags & opf_is_def)
1a24f92f 1635 append_def (var_p);
6de9cd9a 1636 else
1a24f92f 1637 append_use (var_p);
6de9cd9a
DN
1638 }
1639 else
1640 {
780e37d3 1641 VEC(tree,gc) *aliases;
6de9cd9a
DN
1642
1643 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1644 virtual operands, unless the caller has specifically requested
1645 not to add virtual operands (used when adding operands inside an
1646 ADDR_EXPR expression). */
1647 if (flags & opf_no_vops)
1648 return;
1649
1650 aliases = v_ann->may_aliases;
1651
6de9cd9a
DN
1652 if (aliases == NULL)
1653 {
1654 /* The variable is not aliased or it is an alias tag. */
1655 if (flags & opf_is_def)
1656 {
ed7f7d85 1657 if (flags & opf_kill_def)
50dc9a88 1658 {
c75ab022
DB
1659 /* Only regular variables or struct fields may get a
1660 V_MUST_DEF operand. */
326eda4b
DB
1661 gcc_assert (!MTAG_P (var)
1662 || TREE_CODE (var) == STRUCT_FIELD_TAG);
50dc9a88
DN
1663 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1664 variable definitions. */
1665 append_v_must_def (var);
1666 }
a32b97a2 1667 else
50dc9a88
DN
1668 {
1669 /* Add a V_MAY_DEF for call-clobbered variables and
1670 memory tags. */
1671 append_v_may_def (var);
1672 }
6de9cd9a
DN
1673 }
1674 else
ff88c5aa 1675 append_vuse (var);
6de9cd9a
DN
1676 }
1677 else
1678 {
780e37d3
ZD
1679 unsigned i;
1680 tree al;
6de9cd9a
DN
1681
1682 /* The variable is aliased. Add its aliases to the virtual
1683 operands. */
780e37d3 1684 gcc_assert (VEC_length (tree, aliases) != 0);
6de9cd9a
DN
1685
1686 if (flags & opf_is_def)
1687 {
1688 /* If the variable is also an alias tag, add a virtual
1689 operand for it, otherwise we will miss representing
1690 references to the members of the variable's alias set.
1691 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1692 if (v_ann->is_alias_tag)
e8ca4159 1693 append_v_may_def (var);
6de9cd9a 1694
780e37d3
ZD
1695 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1696 append_v_may_def (al);
6de9cd9a
DN
1697 }
1698 else
1699 {
50dc9a88
DN
 1700	      /* Similarly, append a virtual use for VAR itself when
1701 it is an alias tag. */
6de9cd9a 1702 if (v_ann->is_alias_tag)
1a24f92f 1703 append_vuse (var);
6de9cd9a 1704
780e37d3
ZD
1705 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1706 append_vuse (al);
6de9cd9a
DN
1707 }
1708 }
1709 }
1710}
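
/* Illustrative example (added for exposition; not part of the original
   source).  For the statements below, the classification above
   produces:

       int a, b;          // GIMPLE registers: real operands
       int arr[8];        // aggregate: virtual operands only

       a = b + arr[3];    // DEF <a>, USE <b>, VUSE <arr>
       arr[3] = a;        // USE <a>, V_MAY_DEF <arr> (partial store)

   If 'arr' were stored as a whole and had no aliases, the store would
   get a V_MUST_DEF instead.  The variable names are hypothetical.  */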
1711
c75ab022 1712
e8ca4159
DN
1713/* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1714 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1715 a single variable whose address has been taken or any other valid
1716 GIMPLE memory reference (structure reference, array, etc). If the
1717 base address of REF is a decl that has sub-variables, also add all
1718 of its sub-variables. */
6de9cd9a 1719
e8ca4159
DN
1720void
1721add_to_addressable_set (tree ref, bitmap *addresses_taken)
6de9cd9a 1722{
e8ca4159 1723 tree var;
c75ab022 1724 subvar_t svars;
c75ab022 1725
e8ca4159
DN
1726 gcc_assert (addresses_taken);
1727
23e66a36 1728 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
e8ca4159
DN
1729 as the only thing we take the address of. If VAR is a structure,
1730 taking the address of a field means that the whole structure may
1731 be referenced using pointer arithmetic. See PR 21407 and the
1732 ensuing mailing list discussion. */
1733 var = get_base_address (ref);
6de9cd9a
DN
1734 if (var && SSA_VAR_P (var))
1735 {
e8ca4159
DN
1736 if (*addresses_taken == NULL)
1737 *addresses_taken = BITMAP_GGC_ALLOC ();
c75ab022 1738
c75ab022
DB
1739 if (var_can_have_subvars (var)
1740 && (svars = get_subvars_for_var (var)))
1741 {
1742 subvar_t sv;
1743 for (sv = svars; sv; sv = sv->next)
e8ca4159
DN
1744 {
1745 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1746 TREE_ADDRESSABLE (sv->var) = 1;
1747 }
c75ab022 1748 }
9044951e 1749 else
e8ca4159
DN
1750 {
1751 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1752 TREE_ADDRESSABLE (var) = 1;
1753 }
6de9cd9a
DN
1754 }
1755}
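
/* Illustrative example (added for exposition; not part of the original
   source).  Taking the address of a single field marks not just that
   field but every sub-variable of the base:

       struct pair { int x; int y; } p;
       int *q = &p.x;   // DECL_UIDs of both of p's subvars (p.x and
                        // p.y) enter the set; both get TREE_ADDRESSABLE

   This is the conservative treatment demanded by PR 21407: pointer
   arithmetic on 'q' may reach 'p.y'.  Names are hypothetical.  */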
1756
6de9cd9a
DN
1757/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1758 clobbered variables in the function. */
1759
1760static void
ea900239 1761add_call_clobber_ops (tree stmt, tree callee)
6de9cd9a 1762{
f47c96aa 1763 unsigned u;
e288e2f5
AM
1764 bitmap_iterator bi;
1765 stmt_ann_t s_ann = stmt_ann (stmt);
ea900239 1766 bitmap not_read_b, not_written_b;
d16a5e36 1767
6de9cd9a
DN
 1768	  /* Functions that are neither const, pure, nor noreturn may clobber
1769 call-clobbered variables. */
e288e2f5
AM
1770 if (s_ann)
1771 s_ann->makes_clobbering_call = true;
6de9cd9a 1772
e288e2f5
AM
1773 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1774 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
6de9cd9a 1775 if (global_var)
6de9cd9a 1776 {
e288e2f5
AM
1777 add_stmt_operand (&global_var, s_ann, opf_is_def);
1778 return;
1779 }
6de9cd9a 1780
ea900239
DB
1781 /* Get info for local and module level statics. There is a bit
1782 set for each static if the call being processed does not read
1783 or write that variable. */
1784
1785 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1786 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
e288e2f5 1787 /* Add a V_MAY_DEF operand for every call clobbered variable. */
f47c96aa 1788 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
e288e2f5 1789 {
d16a5e36
DB
1790 tree var = referenced_var_lookup (u);
1791 unsigned int escape_mask = var_ann (var)->escape_mask;
1792 tree real_var = var;
1793 bool not_read;
1794 bool not_written;
1795
1796 /* Not read and not written are computed on regular vars, not
1797 subvars, so look at the parent var if this is an SFT. */
dff85230 1798
d16a5e36
DB
1799 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1800 real_var = SFT_PARENT_VAR (var);
1801
1802 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1803 DECL_UID (real_var)) : false;
1804 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1805 DECL_UID (real_var)) : false;
1806 gcc_assert (!unmodifiable_var_p (var));
1807
1808 clobber_stats.clobbered_vars++;
1809
1810 /* See if this variable is really clobbered by this function. */
1811
 1812	      /* Trivial case: things escaping only to pure/const calls are not
 1813	         clobbered by non-pure/const calls, and only read by pure/const ones.  */
1814 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
ea900239 1815 {
d16a5e36
DB
1816 tree call = get_call_expr_in (stmt);
1817 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
ea900239 1818 {
d16a5e36
DB
1819 add_stmt_operand (&var, s_ann, opf_none);
1820 clobber_stats.unescapable_clobbers_avoided++;
1821 continue;
ea900239
DB
1822 }
1823 else
d16a5e36
DB
1824 {
1825 clobber_stats.unescapable_clobbers_avoided++;
1826 continue;
1827 }
ea900239 1828 }
d16a5e36
DB
1829
1830 if (not_written)
1831 {
1832 clobber_stats.static_write_clobbers_avoided++;
1833 if (!not_read)
1834 add_stmt_operand (&var, s_ann, opf_none);
1835 else
1836 clobber_stats.static_read_clobbers_avoided++;
1837 }
1838 else
1839 add_stmt_operand (&var, s_ann, opf_is_def);
e288e2f5 1840 }
d16a5e36 1841
6de9cd9a
DN
1842}
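
/* Summary table (added for exposition; not part of the original
   source) of the per-variable decision implemented above, after the
   pure/const escape shortcut:

       not_written | not_read | operand added for VAR
       ------------+----------+------------------------------
       false       | any      | V_MAY_DEF (may be clobbered)
       true        | false    | VUSE      (read-only use)
       true        | true     | none      (call ignores VAR)
*/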
1843
1844
1845/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1846 function. */
1847
1848static void
d16a5e36 1849add_call_read_ops (tree stmt, tree callee)
6de9cd9a 1850{
f47c96aa 1851 unsigned u;
87c476a2 1852 bitmap_iterator bi;
e288e2f5 1853 stmt_ann_t s_ann = stmt_ann (stmt);
d16a5e36 1854 bitmap not_read_b;
87c476a2 1855
e288e2f5
AM
 1856	  /* If the function is not const, it may read memory.  Add
1857 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1858 for the heuristic used to decide whether to create .GLOBAL_VAR. */
6de9cd9a 1859 if (global_var)
6de9cd9a 1860 {
e288e2f5
AM
1861 add_stmt_operand (&global_var, s_ann, opf_none);
1862 return;
1863 }
1864
d16a5e36 1865 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
e288e2f5
AM
1866
1867 /* Add a VUSE for each call-clobbered variable. */
f47c96aa 1868 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
e288e2f5 1869 {
f47c96aa 1870 tree var = referenced_var (u);
d16a5e36
DB
1871 tree real_var = var;
1872 bool not_read;
1873
1874 clobber_stats.readonly_clobbers++;
f47c96aa 1875
d16a5e36
DB
1876 /* Not read and not written are computed on regular vars, not
1877 subvars, so look at the parent var if this is an SFT. */
e288e2f5 1878
d16a5e36
DB
1879 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1880 real_var = SFT_PARENT_VAR (var);
6de9cd9a 1881
d16a5e36
DB
1882 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1883 DECL_UID (real_var)) : false;
1884
1885 if (not_read)
1886 {
1887 clobber_stats.static_readonly_clobbers_avoided++;
1888 continue;
1889 }
1890
1891 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1892 }
5f240ec4
ZD
1893}
1894
1a24f92f 1895
f430bae8
AM
 1896/* Scan the immediate_use list for VAR, making sure it is linked properly.
 1897   Return TRUE if there is a problem.  */
1898
1899bool
1900verify_imm_links (FILE *f, tree var)
1901{
f47c96aa 1902 use_operand_p ptr, prev, list;
f430bae8
AM
1903 int count;
1904
1905 gcc_assert (TREE_CODE (var) == SSA_NAME);
1906
1907 list = &(SSA_NAME_IMM_USE_NODE (var));
1908 gcc_assert (list->use == NULL);
1909
1910 if (list->prev == NULL)
1911 {
1912 gcc_assert (list->next == NULL);
1913 return false;
1914 }
1915
1916 prev = list;
1917 count = 0;
1918 for (ptr = list->next; ptr != list; )
1919 {
1920 if (prev != ptr->prev)
0e61db61
NS
1921 goto error;
1922
f430bae8 1923 if (ptr->use == NULL)
0e61db61
NS
 1924	goto error; /* Two roots, or a guard node inside the list.  */
1925 else if (*(ptr->use) != var)
1926 goto error;
f430bae8
AM
1927
1928 prev = ptr;
1929 ptr = ptr->next;
e84d8064
AM
1930 /* Avoid infinite loops. 50,000,000 uses probably indicates a problem. */
1931 if (count++ > 50000000)
0e61db61 1932 goto error;
f430bae8
AM
1933 }
1934
1935 /* Verify list in the other direction. */
1936 prev = list;
1937 for (ptr = list->prev; ptr != list; )
1938 {
1939 if (prev != ptr->next)
0e61db61 1940 goto error;
f430bae8
AM
1941 prev = ptr;
1942 ptr = ptr->prev;
1943 if (count-- < 0)
0e61db61 1944 goto error;
f430bae8
AM
1945 }
1946
1947 if (count != 0)
0e61db61 1948 goto error;
f430bae8
AM
1949
1950 return false;
0e61db61
NS
1951
1952 error:
1953 if (ptr->stmt && stmt_modified_p (ptr->stmt))
1954 {
1955 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
1956 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
1957 }
1958 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
1959 (void *)ptr->use);
1960 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
 1961   fprintf (f, "\n");
1962 return true;
f430bae8
AM
1963}
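
/* Illustrative usage sketch (added for exposition; not part of the
   original source): how a checking pass might run the verifier over
   every SSA name, mirroring the iteration in dump_immediate_uses
   below.  The function name is hypothetical.

     static void
     example_verify_all_imm_links (void)
     {
       unsigned int x;
       for (x = 1; x < num_ssa_names; x++)
         {
           tree var = ssa_name (x);
           if (var && verify_imm_links (stderr, var))
             internal_error ("broken immediate-use chain for SSA name");
         }
     }
*/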
1964
1965
 1966/* Dump all the immediate uses of VAR to FILE.  */
1967
1968void
1969dump_immediate_uses_for (FILE *file, tree var)
1970{
1971 imm_use_iterator iter;
1972 use_operand_p use_p;
1973
1974 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1975
1976 print_generic_expr (file, var, TDF_SLIM);
1977 fprintf (file, " : -->");
1978 if (has_zero_uses (var))
1979 fprintf (file, " no uses.\n");
1980 else
1981 if (has_single_use (var))
1982 fprintf (file, " single use.\n");
1983 else
1984 fprintf (file, "%d uses.\n", num_imm_uses (var));
1985
1986 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
1987 {
f47c96aa
AM
1988 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
1989 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
1990 else
1991 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
f430bae8
AM
1992 }
 1993  fprintf (file, "\n");
1994}
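
/* Example output (illustrative; not from a real run).  For an SSA name
   a_2 with two uses, the dump above looks like:

       a_2 : -->2 uses.
       a_3 = a_2 + 1;
       return a_2;
*/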
1995
1996/* Dump all the immediate uses to FILE. */
1997
1998void
1999dump_immediate_uses (FILE *file)
2000{
2001 tree var;
2002 unsigned int x;
2003
2004 fprintf (file, "Immediate_uses: \n\n");
2005 for (x = 1; x < num_ssa_names; x++)
2006 {
 2007      var = ssa_name (x);
2008 if (!var)
2009 continue;
2010 dump_immediate_uses_for (file, var);
2011 }
2012}
2013
2014
2015/* Dump def-use edges on stderr. */
2016
2017void
2018debug_immediate_uses (void)
2019{
2020 dump_immediate_uses (stderr);
2021}
2022
 2023 /* Dump def-use edges for VAR on stderr.  */
2024
2025void
2026debug_immediate_uses_for (tree var)
2027{
2028 dump_immediate_uses_for (stderr, var);
1a24f92f 2029}
6de9cd9a 2030#include "gt-tree-ssa-operands.h"