/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */

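/* Illustrative sketch (not part of the original file): a pass would
   typically consume the cache described above through the operand
   iterators declared in tree-flow.h.  Assuming the iterator interface of
   this GCC revision, a walk over all operands of STMT looks like:

     tree op;
     ssa_op_iter iter;

     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
       print_generic_expr (stderr, op, 0);

   Real operands come back as pointers into the stmt tree; virtual ones
   come from the cache, exactly as described above.  */
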
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This
   is used to distinguish "reset the world" events from explicit
   MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)

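/* Illustrative sketch (not part of the original file): the flags above
   are OR'd together by the scanning routines.  For instance, the LHS of
   a MODIFY_EXPR is scanned with

     get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			opf_is_def | opf_kill_def);

   and opf_kill_def is stripped again on the way down whenever the
   definition turns out to be partial or through a pointer (see
   get_indirect_ref_operands and get_tmr_operands below).  */
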
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the V_MAY_DEF operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the V_MUST_DEF operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True if the operand cache is active, i.e. between a call to
   init_ssa_operands and the matching fini_ssa_operands.  */
static bool ops_active = false;

static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);

static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;

/* Allocates operand OP of given TYPE from the appropriate free list,
   or allocates a new one if the list is empty.  */

#define ALLOC_OPTYPE(OP, TYPE)				\
  do							\
    {							\
      TYPE##_optype_p ret = free_##TYPE##s;		\
      if (ret)						\
	free_##TYPE##s = ret->next;			\
      else						\
	ret = ssa_operand_alloc (sizeof (*ret));	\
      (OP) = ret;					\
    } while (0)

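/* Illustrative sketch (not part of the original file): for TYPE == def,
   ALLOC_OPTYPE expands to roughly

     def_optype_p ret = free_defs;
     if (ret)
       free_defs = ret->next;
     else
       ret = ssa_operand_alloc (sizeof (*ret));
     (OP) = ret;

   i.e. nodes released by the finalize_* routines are recycled before any
   new memory is carved out of the GC'd operand_memory chunks.  */
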
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}


/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}

/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return ops_active;
}


/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (V_MAY_DEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  build_defs = VEC_alloc (tree, heap, 5);
  build_uses = VEC_alloc (tree, heap, 10);
  build_vuses = VEC_alloc (tree, heap, 25);
  build_v_may_defs = VEC_alloc (tree, heap, 25);
  build_v_must_defs = VEC_alloc (tree, heap, 25);

  gcc_assert (operand_memory == NULL);
  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  VEC_free (tree, heap, build_defs);
  VEC_free (tree, heap, build_uses);
  VEC_free (tree, heap, build_v_must_defs);
  VEC_free (tree, heap, build_v_may_defs);
  VEC_free (tree, heap, build_vuses);
  free_defs = NULL;
  free_uses = NULL;
  free_vuses = NULL;
  free_maydefs = NULL;
  free_mustdefs = NULL;
  while ((ptr = operand_memory) != NULL)
    {
      operand_memory = operand_memory->next;
      ggc_free (ptr);
    }

  ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:%d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:%d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:%d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:%d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}

/* Return a chunk of memory of SIZE bytes for operands.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = operand_memory;
      operand_memory = ptr;
      operand_memory_index = 0;
    }
  ptr = &(operand_memory->mem[operand_memory_index]);
  operand_memory_index += size;
  return ptr;
}

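/* Illustrative sketch (not part of the original file): the allocator is a
   simple bump pointer over GC'd chunks.  With an SSA_OPERAND_MEMORY_SIZE
   of, say, 2048, three successive requests of 24 bytes come from offsets
   0, 24 and 48 of the same chunk; only when operand_memory_index + size
   would cross the chunk size is a fresh ssa_operand_memory_d pushed onto
   the operand_memory list.  Freeing is wholesale: fini_ssa_operands walks
   the chunk list and ggc_free's each chunk, while individual operand
   nodes are instead recycled through the free_* lists.  */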

/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}

/* Appends ELT after TO, and moves the TO pointer to ELT.  */

#define APPEND_OP_AFTER(ELT, TO)	\
  do					\
    {					\
      (TO)->next = (ELT);		\
      (TO) = (ELT);			\
    } while (0)

/* Appends head of list FROM after TO, and moves both pointers
   to their successors.  */

#define MOVE_HEAD_AFTER(FROM, TO)	\
  do					\
    {					\
      APPEND_OP_AFTER (FROM, TO);	\
      (FROM) = (FROM)->next;		\
    } while (0)

/* Moves OP to the appropriate freelist.  OP is set to its successor.  */

#define MOVE_HEAD_TO_FREELIST(OP, TYPE)		\
  do						\
    {						\
      TYPE##_optype_p next = (OP)->next;	\
      (OP)->next = free_##TYPE##s;		\
      free_##TYPE##s = (OP);			\
      (OP) = next;				\
    } while (0)

/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
   of immediate uses.  STMT is the current statement.  */

#define INITIALIZE_USE(USE_PTR, VAL, STMT)		\
  do							\
    {							\
      (USE_PTR)->use = (VAL);				\
      link_imm_use_stmt ((USE_PTR), *(VAL), (STMT));	\
    } while (0)

/* Adds OP to the list of defs after LAST, and moves
   LAST to the new element.  */

static inline void
add_def_op (tree *op, def_optype_p *last)
{
  def_optype_p new;

  ALLOC_OPTYPE (new, def);
  DEF_OP_PTR (new) = op;
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of uses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_use_op (tree stmt, tree *op, use_optype_p *last)
{
  use_optype_p new;

  ALLOC_OPTYPE (new, use);
  INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
{
  vuse_optype_p new;

  ALLOC_OPTYPE (new, vuse);
  VUSE_OP (new) = op;
  INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
{
  maydef_optype_p new;

  ALLOC_OPTYPE (new, maydef);
  MAYDEF_RESULT (new) = op;
  MAYDEF_OP (new) = op;
  INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
{
  mustdef_optype_p new;

  ALLOC_OPTYPE (new, mustdef);
  MUSTDEF_RESULT (new) = op;
  MUSTDEF_KILL (new) = op;
  INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
  APPEND_OP_AFTER (new, *last);
}

/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Given that the def operands list is not necessarily sorted,
	   merging the operands this way does not make much sense.
	-- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_def_ops (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  tree *old_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_defs))
    {
      tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
      old_base = DEF_OP_PTR (old_ops);

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  MOVE_HEAD_TO_FREELIST (old_ops, def);
	}
      else
	{
	  /* This is a new operand.  */
	  add_def_op (new_base, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_defs); new_i++)
    add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = free_defs;
      free_defs = old_ops;
    }

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_defs));
  }
#endif
}
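
/* Illustrative sketch (not part of the original file): the loop above is
   a merge of two address-ordered sequences.  Suppose the old def list
   holds pointers A < B < C and build_defs holds A < C < D.  Then A is
   reused, B is moved to free_defs, C is reused, and a node for D is
   allocated, giving a new list A, C, D while touching only the entries
   that actually changed.  The same pattern is used for the virtual
   operand lists below, keyed on DECL_UID instead of pointer value.  */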

/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */
  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_use_ops (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = free_uses;
      free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);

  last->next = NULL;

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}

/* Create use operands for STMT from the elements in build_uses.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *) VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}


/* Takes elements from build_v_may_defs and turns them into maydef operands of
   STMT.  */

static inline void
finalize_ssa_v_may_def_ops (tree stmt)
{
  unsigned new_i;
  struct maydef_optype_d new_list;
  maydef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MAYDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
    {
      act = VEC_index (tree, build_v_may_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MAYDEF_OP (old_ops));

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  delink_imm_use (MAYDEF_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, maydef);
	}
      else
	{
	  /* This is a new operand.  */
	  add_maydef_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_v_may_defs list, simply
     emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
    add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (MAYDEF_OP_PTR (ptr));
      old_ops->next = free_maydefs;
      free_maydefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MAYDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_may_defs));
  }
#endif
}

static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}


/* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}

/* Takes elements from build_vuses and turns them into vuse operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i;
  struct vuse_optype_d new_list;
  vuse_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VUSE_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vuses))
    {
      act = VEC_index (tree, build_vuses, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (VUSE_OP (old_ops));

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (VUSE_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  delink_imm_use (USE_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, vuse);
	}
      else
	{
	  /* This is a new operand.  */
	  add_vuse_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (VUSE_OP_PTR (ptr));
      old_ops->next = free_vuses;
      free_vuses = old_ops;
    }

  /* Now set the stmt's operands.  */
  VUSE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}

/* Create VUSE operands for STMT from the elements in build_vuses.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
     not needed because V_MAY_DEFs imply a VUSE of the variable.  For
     instance, suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the
     V_MAY_DEF operation.  */
  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
	{
	  tree vuse;
	  vuse = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  VEC_ordered_remove (tree, build_vuses, vuse_index);
		  continue;
		}
	    }
	  vuse_index++;
	}
    }
  else
    {
      /* Clear out the in_list bits.  */
      for (vuse_index = 0;
	   vuse_index < VEC_length (tree, build_vuses);
	   vuse_index++)
	{
	  tree t = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (t) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (t);
	      ann->in_vuse_list = 0;
	    }
	}
    }

  finalize_ssa_vuse_ops (stmt);

  /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the VUSEs build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}

/* Takes elements from build_v_must_defs and turns them into mustdef
   operands of STMT.  */

static inline void
finalize_ssa_v_must_def_ops (tree stmt)
{
  unsigned new_i;
  struct mustdef_optype_d new_list;
  mustdef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MUSTDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
    {
      act = VEC_index (tree, build_v_must_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MUSTDEF_KILL (old_ops));

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
	}
      else
	{
	  /* This is a new operand.  */
	  add_mustdef_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_v_must_defs list, simply
     emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
    add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (MUSTDEF_KILL_PTR (ptr));
      old_ops->next = free_mustdefs;
      free_mustdefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MUSTDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_must_defs));
  }
#endif
}

static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF
     per statement (one for each subvar).  It is a bit expensive to
     verify that all must-defs in a statement belong to subvars if
     there is more than one must-def, so we don't do it.  Suffice to
     say, if you reach here without having subvars, and have num > 1,
     you have hit a bug.  */
  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}


/* Finalize all the build vectors and fill the new operand lists into STMT.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree) var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree) var);
}


/* Add VAR to the list of virtual must definitions in the build array.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree) var);
}

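/* Illustrative sketch (not part of the original file): the append_*
   routines above use two different duplicate-suppression schemes.
   append_v_may_def and append_vuse set per-variable in_v_may_def_list /
   in_vuse_list bits, so rejecting a second append of the same decl is
   O(1); append_v_must_def instead scans build_v_must_defs linearly,
   presumably acceptable because a statement rarely carries more than a
   few V_MUST_DEFs.  */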

/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == global_var)
    return true;

  /* We cannot prune nonlocal aliases because they are not type
     specific.  */
  if (alias == nonlocal_all)
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access do not overlap with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
	 const char *byte_op;
	 struct asm_int_op
	 {
	   const char *hi;
	 } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* Under strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !var_ann (alias)->is_heapvar
	   && (TREE_CODE (base) != INDIRECT_REF
	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
	   /* If the struct has the may_alias attribute, its alias set
	      is zero and we must not prune the access.  */
	   && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}

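/* Illustrative sketch (not part of the original file): the two routines
   below decide where an operand lands.  For an unaliased GIMPLE register
   'a', the store in 'a = b' becomes a plain DEF via append_def.  If 'a'
   lives in memory and is unaliased, a killing store yields a V_MUST_DEF;
   if 'a' has may-aliases, the same store instead yields V_MAY_DEFs of
   the aliases (and of 'a' itself when it is an alias tag), pruned by
   access_can_touch_variable above.  */
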
02075bb2
DN
1200/* Add VAR to the virtual operands array. FLAGS is as in
1201 get_expr_operands. FULL_REF is a tree that contains the entire
1202 pointer dereference expression, if available, or NULL otherwise.
1203 OFFSET and SIZE come from the memory access expression that
1204 generated this virtual operand. FOR_CLOBBER is true is this is
1205 adding a virtual operand for a call clobber. */
1206
1207static void
1208add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1209 tree full_ref, HOST_WIDE_INT offset,
1210 HOST_WIDE_INT size, bool for_clobber)
f430bae8 1211{
02075bb2
DN
1212 VEC(tree,gc) *aliases;
1213 tree sym;
1214 var_ann_t v_ann;
f430bae8 1215
02075bb2
DN
1216 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1217 v_ann = var_ann (sym);
1218
1219 /* Mark statements with volatile operands. Optimizers should back
1220 off from statements having volatile operands. */
1221 if (TREE_THIS_VOLATILE (sym) && s_ann)
1222 s_ann->has_volatile_ops = true;
f430bae8 1223
02075bb2
DN
1224 /* If the variable cannot be modified and this is a V_MAY_DEF change
1225 it into a VUSE. This happens when read-only variables are marked
1226 call-clobbered and/or aliased to writable variables. So we only
1227 check that this only happens on non-specific stores.
1a24f92f 1228
02075bb2
DN
1229 Note that if this is a specific store, i.e. associated with a
1230 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
1231 into validation problems.
1a24f92f 1232
02075bb2
DN
1233 This can happen when programs cast away const, leaving us with a
1234 store to read-only memory. If the statement is actually executed
1235 at runtime, then the program is ill formed. If the statement is
1236 not executed then all is well. At the very least, we cannot ICE. */
1237 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1238 flags &= ~(opf_is_def | opf_kill_def);
1239
1240 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1241 virtual operands, unless the caller has specifically requested
1242 not to add virtual operands (used when adding operands inside an
1243 ADDR_EXPR expression). */
1244 if (flags & opf_no_vops)
f47c96aa 1245 return;
02075bb2
DN
1246
1247 aliases = v_ann->may_aliases;
1248 if (aliases == NULL)
1249 {
1250 /* The variable is not aliased or it is an alias tag. */
1251 if (flags & opf_is_def)
1252 {
1253 if (flags & opf_kill_def)
1254 {
1255 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1256 variable definitions. */
1257 gcc_assert (!MTAG_P (var)
1258 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1259 append_v_must_def (var);
1260 }
1261 else
1262 {
1263 /* Add a V_MAY_DEF for call-clobbered variables and
1264 memory tags. */
1265 append_v_may_def (var);
1266 }
1267 }
1268 else
1269 append_vuse (var);
1270 }
1271 else
1272 {
1273 unsigned i;
1274 tree al;
1275
1276 /* The variable is aliased. Add its aliases to the virtual
1277 operands. */
1278 gcc_assert (VEC_length (tree, aliases) != 0);
1279
1280 if (flags & opf_is_def)
1281 {
1282
1283 bool none_added = true;
f47c96aa 1284
02075bb2
DN
1285 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1286 {
1287 if (!access_can_touch_variable (full_ref, al, offset, size))
1288 continue;
1289
1290 none_added = false;
1291 append_v_may_def (al);
1292 }
f47c96aa 1293
02075bb2
DN
1294 /* If the variable is also an alias tag, add a virtual
1295 operand for it, otherwise we will miss representing
1296 references to the members of the variable's alias set.
1297 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1298
1299 It is also necessary to add bare defs on clobbers for
18cd8a03 1300 SMT's, so that bare SMT uses caused by pruning all the
02075bb2
DN
1301 aliases will link up properly with calls. In order to
1302 keep the number of these bare defs we add down to the
18cd8a03 1303 minimum necessary, we keep track of which SMT's were used
65ad7c63 1304 alone in statement vdefs or VUSEs. */
02075bb2
DN
1305 if (v_ann->is_aliased
1306 || none_added
18cd8a03
DN
1307 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1308 && for_clobber
1309 && SMT_USED_ALONE (var)))
02075bb2 1310 {
18cd8a03 1311 /* Every bare SMT def we add should have SMT_USED_ALONE
02075bb2
DN
1312 set on it, or else we will get the wrong answer on
1313 clobbers. */
18cd8a03
DN
1314 if (none_added
1315 && !updating_used_alone && aliases_computed_p
1316 && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
1317 gcc_assert (SMT_USED_ALONE (var));
f47c96aa 1318
02075bb2
DN
1319 append_v_may_def (var);
1320 }
1321 }
1322 else
1323 {
1324 bool none_added = true;
1325 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1326 {
1327 if (!access_can_touch_variable (full_ref, al, offset, size))
1328 continue;
1329 none_added = false;
1330 append_vuse (al);
1331 }
f47c96aa 1332
02075bb2
DN
1333 /* Similarly, append a virtual uses for VAR itself, when
1334 it is an alias tag. */
1335 if (v_ann->is_aliased || none_added)
1336 append_vuse (var);
1337 }
1338 }
f47c96aa
AM
1339}
1340
f47c96aa 1341
02075bb2
DN
1342/* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1343 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1344 the statement's real operands, otherwise it is added to virtual
1345 operands. */
1346
1347static void
1348add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
f47c96aa 1349{
02075bb2
DN
1350 bool is_real_op;
1351 tree var, sym;
1352 var_ann_t v_ann;
f47c96aa 1353
02075bb2
DN
1354 var = *var_p;
1355 gcc_assert (SSA_VAR_P (var));
f47c96aa 1356
02075bb2 1357 is_real_op = is_gimple_reg (var);
f47c96aa 1358
02075bb2
DN
1359 /* If this is a real operand, the operand is either an SSA name or a
1360 decl. Virtual operands may only be decls. */
1361 gcc_assert (is_real_op || DECL_P (var));
f47c96aa 1362
02075bb2
DN
1363 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1364 v_ann = var_ann (sym);
f47c96aa 1365
02075bb2
DN
1366 /* Mark statements with volatile operands. Optimizers should back
1367 off from statements having volatile operands. */
1368 if (TREE_THIS_VOLATILE (sym) && s_ann)
1369 s_ann->has_volatile_ops = true;
f47c96aa 1370
02075bb2 1371 if (is_real_op)
f47c96aa 1372 {
02075bb2
DN
1373 /* The variable is a GIMPLE register. Add it to real operands. */
1374 if (flags & opf_is_def)
1375 append_def (var_p);
1376 else
1377 append_use (var_p);
f47c96aa 1378 }
02075bb2
DN
1379 else
1380 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1381}
f47c96aa 1382
f47c96aa 1383
02075bb2
DN
1384/* A subroutine of get_expr_operands to handle INDIRECT_REF,
1385 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
f47c96aa 1386
02075bb2
DN
1387 STMT is the statement being processed, EXPR is the INDIRECT_REF
1388 that got us here.
1389
1390 FLAGS is as in get_expr_operands.
1a24f92f 1391
02075bb2
DN
1392 FULL_REF contains the full pointer dereference expression, if we
1393 have it, or NULL otherwise.
1a24f92f 1394
02075bb2
DN
1395 OFFSET and SIZE are the location of the access inside the
1396 dereferenced pointer, if known.
f47c96aa 1397
02075bb2
DN
1398 RECURSE_ON_BASE should be set to true if we want to continue
1399 calling get_expr_operands on the base pointer, and false if
1400 something else will do it for us. */
f47c96aa 1401
02075bb2
DN
1402static void
1403get_indirect_ref_operands (tree stmt, tree expr, int flags,
1404 tree full_ref,
1405 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1406 bool recurse_on_base)
1407{
1408 tree *pptr = &TREE_OPERAND (expr, 0);
1409 tree ptr = *pptr;
1410 stmt_ann_t s_ann = stmt_ann (stmt);
f47c96aa 1411
02075bb2
DN
1412 /* Stores into INDIRECT_REF operands are never killing definitions. */
1413 flags &= ~opf_kill_def;
f47c96aa 1414
02075bb2 1415 if (SSA_VAR_P (ptr))
f47c96aa 1416 {
02075bb2
DN
1417 struct ptr_info_def *pi = NULL;
1418
1419 /* If PTR has flow-sensitive points-to information, use it. */
1420 if (TREE_CODE (ptr) == SSA_NAME
1421 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1422 && pi->name_mem_tag)
f47c96aa 1423 {
02075bb2
DN
1424 /* PTR has its own memory tag. Use it. */
1425 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1426 full_ref, offset, size, false);
f47c96aa 1427 }
02075bb2 1428 else
f47c96aa 1429 {
02075bb2 1430 /* If PTR is not an SSA_NAME or it doesn't have a name
18cd8a03 1431 tag, use its symbol memory tag. */
02075bb2 1432 var_ann_t v_ann;
f47c96aa 1433
02075bb2
DN
1434 /* If we are emitting debugging dumps, display a warning if
1435 PTR is an SSA_NAME with no flow-sensitive alias
1436 information. That means that we may need to compute
1437 aliasing again. */
1438 if (dump_file
1439 && TREE_CODE (ptr) == SSA_NAME
1440 && pi == NULL)
1441 {
1442 fprintf (dump_file,
1443 "NOTE: no flow-sensitive alias info for ");
1444 print_generic_expr (dump_file, ptr, dump_flags);
1445 fprintf (dump_file, " in ");
1446 print_generic_stmt (dump_file, stmt, dump_flags);
1447 }
f430bae8 1448
02075bb2
DN
1449 if (TREE_CODE (ptr) == SSA_NAME)
1450 ptr = SSA_NAME_VAR (ptr);
1451 v_ann = var_ann (ptr);
f430bae8 1452
18cd8a03
DN
1453 if (v_ann->symbol_mem_tag)
1454 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
02075bb2 1455 full_ref, offset, size, false);
f430bae8
AM
1456 }
1457 }
02075bb2
DN
1458 else if (TREE_CODE (ptr) == INTEGER_CST)
1459 {
1460 /* If a constant is used as a pointer, we can't generate a real
1461 operand for it but we mark the statement volatile to prevent
1462 optimizations from messing things up. */
1463 if (s_ann)
1464 s_ann->has_volatile_ops = true;
1465 return;
1466 }
1467 else
1468 {
1469 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1470 gcc_unreachable ();
1471 }
f430bae8 1472
02075bb2
DN
1473 /* If requested, add a USE operand for the base pointer. */
1474 if (recurse_on_base)
1475 get_expr_operands (stmt, pptr, opf_none);
f430bae8
AM
1476}
1477
643519b7 1478
02075bb2 1479/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
6de9cd9a
DN
1480
1481static void
02075bb2 1482get_tmr_operands (tree stmt, tree expr, int flags)
6de9cd9a 1483{
02075bb2
DN
1484 tree tag = TMR_TAG (expr), ref;
1485 HOST_WIDE_INT offset, size, maxsize;
1486 subvar_t svars, sv;
e288e2f5 1487 stmt_ann_t s_ann = stmt_ann (stmt);
6de9cd9a 1488
02075bb2
DN
1489 /* First record the real operands. */
1490 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1491 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
6de9cd9a 1492
02075bb2
DN
1493 /* MEM_REFs should never be killing. */
1494 flags &= ~opf_kill_def;
6de9cd9a 1495
02075bb2 1496 if (TMR_SYMBOL (expr))
6de9cd9a 1497 {
02075bb2
DN
1498 stmt_ann_t ann = stmt_ann (stmt);
1499 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1500 }
6de9cd9a 1501
02075bb2
DN
1502 if (!tag)
1503 {
1504 /* Something weird, so ensure that we will be careful. */
1505 stmt_ann (stmt)->has_volatile_ops = true;
310de761 1506 return;
02075bb2 1507 }
44de5aeb 1508
02075bb2
DN
1509 if (DECL_P (tag))
1510 {
1511 get_expr_operands (stmt, &tag, flags);
1512 return;
1513 }
643519b7 1514
02075bb2
DN
1515 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1516 gcc_assert (ref != NULL_TREE);
1517 svars = get_subvars_for_var (ref);
1518 for (sv = svars; sv; sv = sv->next)
1519 {
1520 bool exact;
1521 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1522 {
1523 int subvar_flags = flags;
1524 if (!exact || size != maxsize)
1525 subvar_flags &= ~opf_kill_def;
1526 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1527 }
1528 }
1529}
643519b7 1530
7ccf35ed 1531
02075bb2
DN
1532/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1533 clobbered variables in the function. */
6de9cd9a 1534
02075bb2
DN
1535static void
1536add_call_clobber_ops (tree stmt, tree callee)
1537{
1538 unsigned u;
1539 bitmap_iterator bi;
1540 stmt_ann_t s_ann = stmt_ann (stmt);
1541 bitmap not_read_b, not_written_b;
1542
1543 /* Functions that are not const, pure or never return may clobber
1544 call-clobbered variables. */
1545 if (s_ann)
1546 s_ann->makes_clobbering_call = true;
ac182688 1547
02075bb2
DN
1548 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1549 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1550 if (global_var)
1551 {
1552 add_stmt_operand (&global_var, s_ann, opf_is_def);
6de9cd9a 1553 return;
02075bb2 1554 }
6de9cd9a 1555
02075bb2
DN
1556 /* Get info for local and module level statics. There is a bit
1557 set for each static if the call being processed does not read
1558 or write that variable. */
1559 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1560 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1561 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1562 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1563 {
1564 tree var = referenced_var_lookup (u);
1565 unsigned int escape_mask = var_ann (var)->escape_mask;
1566 tree real_var = var;
1567 bool not_read;
1568 bool not_written;
1569
1570 /* Not read and not written are computed on regular vars, not
1571 subvars, so look at the parent var if this is an SFT. */
1572 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1573 real_var = SFT_PARENT_VAR (var);
1574
1575 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1576 DECL_UID (real_var)) : false;
1577 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1578 DECL_UID (real_var)) : false;
1579 gcc_assert (!unmodifiable_var_p (var));
1580
1581 clobber_stats.clobbered_vars++;
1582
1583 /* See if this variable is really clobbered by this function. */
1584
1585 /* Trivial case: Things escaping only to pure/const are not
1586 clobbered by non-pure-const, and only read by pure/const. */
1587 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1588 {
1589 tree call = get_call_expr_in (stmt);
1590 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1591 {
1592 add_stmt_operand (&var, s_ann, opf_none);
1593 clobber_stats.unescapable_clobbers_avoided++;
1594 continue;
1595 }
1596 else
1597 {
1598 clobber_stats.unescapable_clobbers_avoided++;
1599 continue;
1600 }
1601 }
1602
1603 if (not_written)
1604 {
1605 clobber_stats.static_write_clobbers_avoided++;
1606 if (!not_read)
1607 add_stmt_operand (&var, s_ann, opf_none);
1608 else
1609 clobber_stats.static_read_clobbers_avoided++;
1610 }
1611 else
65ad7c63 1612 add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
02075bb2 1613 }
02075bb2
DN
1614}
1615
1616
1617/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1618 function. */
1619
1620static void
1621add_call_read_ops (tree stmt, tree callee)
1622{
1623 unsigned u;
1624 bitmap_iterator bi;
1625 stmt_ann_t s_ann = stmt_ann (stmt);
1626 bitmap not_read_b;
1627
1628 /* if the function is not pure, it may reference memory. Add
1629 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1630 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1631 if (global_var)
1632 {
1633 add_stmt_operand (&global_var, s_ann, opf_none);
1634 return;
1635 }
1636
1637 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1638
1639 /* Add a VUSE for each call-clobbered variable. */
1640 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1641 {
1642 tree var = referenced_var (u);
1643 tree real_var = var;
1644 bool not_read;
1645
1646 clobber_stats.readonly_clobbers++;
1647
1648 /* Not read and not written are computed on regular vars, not
1649 subvars, so look at the parent var if this is an SFT. */
1650
1651 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1652 real_var = SFT_PARENT_VAR (var);
1653
65ad7c63
DN
1654 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1655 : false;
02075bb2
DN
1656
1657 if (not_read)
1658 {
1659 clobber_stats.static_readonly_clobbers_avoided++;
1660 continue;
1661 }
1662
1663 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1664 }
1665}
1666
1667
1668/* A subroutine of get_expr_operands to handle CALL_EXPR. */
1669
1670static void
1671get_call_expr_operands (tree stmt, tree expr)
1672{
1673 tree op;
1674 int call_flags = call_expr_flags (expr);
1675
1676 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1677 operands for all the symbols that have been found to be
1678 call-clobbered.
1679
1680 Note that if aliases have not been computed, the global effects
1681 of calls will not be included in the SSA web. This is fine
1682 because no optimizer should run before aliases have been
1683 computed. By not bothering with virtual operands for CALL_EXPRs
1684 we avoid adding superfluous virtual operands, which can be a
1685 significant compile time sink (See PR 15855). */
1686 if (aliases_computed_p
1687 && !bitmap_empty_p (call_clobbered_vars)
1688 && !(call_flags & ECF_NOVOPS))
1689 {
1690 /* A 'pure' or a 'const' function never call-clobbers anything.
1691 A 'noreturn' function might, but since we don't return anyway
1692 there is no point in recording that. */
1693 if (TREE_SIDE_EFFECTS (expr)
1694 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1695 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1696 else if (!(call_flags & ECF_CONST))
1697 add_call_read_ops (stmt, get_callee_fndecl (expr));
1698 }
1699
1700 /* Find uses in the called function. */
1701 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1702
1703 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1704 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1705
1706 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1707}


/* Scan operands for the ASM_EXPR statement STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca (noutputs * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                               &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t) && s_ann)
            add_to_addressable_set (t, &s_ann->addresses_taken);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
        unsigned i;
        bitmap_iterator bi;

        /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
           decided to group them).  */
        if (global_var)
          add_stmt_operand (&global_var, s_ann, opf_is_def);
        else
          EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
            {
              tree var = referenced_var (i);
              add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
            }

        /* Now clobber all addressables.  */
        EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
          {
            tree var = referenced_var (i);

            /* Subvars are explicitly represented in this list, so
               we don't need the original to be added to the clobber
               ops, but the original *will* be in this list because
               we keep the addressability of the original
               variable up-to-date so we don't screw up the rest of
               the backend.  */
            if (var_can_have_subvars (var)
                && get_subvars_for_var (var) != NULL)
              continue;

            add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
          }

        break;
      }
}
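
/* Illustrative sketch (hypothetical): an asm with a "memory" clobber
   receives may-defs for every call-clobbered and addressable variable,
   so no load or store can be moved across it:

        int a;                                      <-- addressable
        __asm__ __volatile__ ("" : : : "memory");   <-- V_MAY_DEF <a>  */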


/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_expr_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

  /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
     registers.  If the LHS is a store to memory, we will either need
     a preserving definition (V_MAY_DEF) or a killing definition
     (V_MUST_DEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with V_MAY_DEF operators.

     The determination of whether to use a preserving or a killing
     definition is done while scanning the LHS of the assignment.  By
     default, assume that we will emit a V_MUST_DEF.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
}
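
/* Illustrative sketch (hypothetical GIMPLE): the LHS scan downgrades
   the default V_MUST_DEF to a V_MAY_DEF when only part of the object
   is written or the store goes through a pointer:

        a = x;          <-- V_MUST_DEF <a>, the old value of a is killed
        s.f = x;        <-- V_MAY_DEF <s> unless an exact subvar covers s.f
        *p = x;         <-- V_MAY_DEF for everything p may point to  */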


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is a combination of OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting
         variable references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* Otherwise, there may be variables referenced inside but there
         should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
        subvar_t svars;

        /* Add the subvars for a variable, if it has subvars, to DEFS
           or USES.  Otherwise, add the variable itself.  Whether it
           goes to USES or DEFS depends on the operand flags.  */
        if (var_can_have_subvars (expr)
            && (svars = get_subvars_for_var (expr)))
          {
            subvar_t sv;
            for (sv = svars; sv; sv = sv->next)
              add_stmt_operand (&sv->var, s_ann, flags);
          }
        else
          add_stmt_operand (expr_p, s_ann, flags);

        return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree ref;
        HOST_WIDE_INT offset, size, maxsize;
        bool none = true;

        /* This component reference becomes an access to all of the
           subvariables it can touch, if we can determine that, but
           *NOT* the real one.  If we can't determine which fields we
           could touch, the recursion will eventually get to a
           variable and add *all* of its subvars, or whatever is the
           minimum correct subset.  */
        ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
        if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
          {
            subvar_t sv;
            subvar_t svars = get_subvars_for_var (ref);

            for (sv = svars; sv; sv = sv->next)
              {
                bool exact;

                if (overlap_subvar (offset, maxsize, sv->var, &exact))
                  {
                    int subvar_flags = flags;
                    none = false;
                    if (!exact || size != maxsize)
                      subvar_flags &= ~opf_kill_def;
                    add_stmt_operand (&sv->var, s_ann, subvar_flags);
                  }
              }

            if (!none)
              flags |= opf_no_vops;
          }
        else if (TREE_CODE (ref) == INDIRECT_REF)
          {
            get_indirect_ref_operands (stmt, ref, flags, expr, offset,
                                       maxsize, false);
            flags |= opf_no_vops;
          }

        /* Even if we found subvars above we need to ensure to see
           immediate uses for d in s.a[d].  In case s.a has a subvar,
           we would miss it otherwise.  */
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                           flags & ~opf_kill_def);

        if (code == COMPONENT_REF)
          {
            if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              s_ann->has_volatile_ops = true;
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
          }

        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, opf_none);

        return;
      }

    case BIT_FIELD_REF:
      /* Stores using BIT_FIELD_REF are always preserving definitions.  */
      flags &= ~opf_kill_def;

      /* Fallthru  */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_RETURN:
    case OMP_CONTINUE:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
        goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
        goto do_binary;
      if (class == tcc_constant || class == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
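
/* Illustrative sketch (hypothetical GIMPLE): the ADDR_EXPR case above
   adds no VUSE for the object whose address is taken, but still scans
   the real operands inside the reference:

        p = &a[i];      <-- real USE of i, no VUSE of a;
                            a is recorded in addresses_taken.  */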


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_modify_expr_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
         operand pointer (which may only happen for USE operands), we
         will fail in add_stmt_operand.  This default will handle
         statements like empty statements, or CALL_EXPRs that may
         appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands.  */
  if (ann)
    ann->has_volatile_ops = false;

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}


/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}


/* Update the operand cache of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
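
/* Illustrative sketch (hypothetical caller, not part of this file,
   assuming the usual mark_stmt_modified helper from tree-flow-inline.h):
   a pass that rewrites a statement in place must mark it modified
   before asking for fresh operands.  */
#if 0
static void
example_rewrite_rhs (tree stmt, tree new_rhs)
{
  /* Replace the RHS of a MODIFY_EXPR statement.  */
  TREE_OPERAND (stmt, 1) = new_rhs;

  /* Flag the statement as modified and rebuild its operand cache.  */
  mark_stmt_modified (stmt);
  update_stmt_operands (stmt);
}
#endif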


/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
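
/* Illustrative sketch (hypothetical caller): a pass that clones a
   statement can keep the SSA web consistent by giving the copy the
   same virtual operands as the original, e.g.

        copy = unshare_expr (stmt);
        bsi_insert_after (&bsi, copy, BSI_SAME_STMT);
        copy_virtual_operands (copy, stmt);  */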
a6c550f9 2276
a6c550f9 2277
02075bb2
DN
2278/* Specifically for use in DOM's expression analysis. Given a store, we
2279 create an artificial stmt which looks like a load from the store, this can
2280 be used to eliminate redundant loads. OLD_OPS are the operands from the
2281 store stmt, and NEW_STMT is the new load which represents a load of the
2282 values stored. */
2283
2284void
2285create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2286{
2287 stmt_ann_t ann;
2288 tree op;
2289 ssa_op_iter iter;
2290 use_operand_p use_p;
2291 unsigned x;
2292
2293 ann = get_stmt_ann (new_stmt);
2294
65ad7c63 2295 /* Process the stmt looking for operands. */
02075bb2
DN
2296 start_ssa_stmt_operands ();
2297 parse_ssa_operands (new_stmt);
a6c550f9 2298
02075bb2
DN
2299 for (x = 0; x < VEC_length (tree, build_vuses); x++)
2300 {
2301 tree t = VEC_index (tree, build_vuses, x);
2302 if (TREE_CODE (t) != SSA_NAME)
2303 {
2304 var_ann_t ann = var_ann (t);
2305 ann->in_vuse_list = 0;
6de9cd9a 2306 }
02075bb2
DN
2307 }
2308
2309 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2310 {
2311 tree t = VEC_index (tree, build_v_may_defs, x);
2312 if (TREE_CODE (t) != SSA_NAME)
6de9cd9a 2313 {
02075bb2
DN
2314 var_ann_t ann = var_ann (t);
2315 ann->in_v_may_def_list = 0;
6de9cd9a
DN
2316 }
2317 }
6de9cd9a 2318
02075bb2
DN
2319 /* Remove any virtual operands that were found. */
2320 VEC_truncate (tree, build_v_may_defs, 0);
2321 VEC_truncate (tree, build_v_must_defs, 0);
2322 VEC_truncate (tree, build_vuses, 0);
faf7c678 2323
02075bb2
DN
2324 /* For each VDEF on the original statement, we want to create a
2325 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2326 statement. */
2327 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2328 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2329 append_vuse (op);
2330
2331 /* Now build the operands for this new stmt. */
2332 finalize_ssa_stmt_operands (new_stmt);
3c0b6c43 2333
02075bb2
DN
2334 /* All uses in this fake stmt must not be in the immediate use lists. */
2335 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2336 delink_imm_use (use_p);
2337}
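
/* Illustrative sketch (hypothetical GIMPLE): for the store

        *p = x_3;       <-- a_5 = V_MAY_DEF <a_4>

   the artificial load built above looks like

        tmp = *p;       <-- VUSE <a_5>

   so DOM can recognize that a later load of *p is redundant.  */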
3c0b6c43 2338
3c0b6c43 2339
02075bb2
DN
2340/* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done
2341 to test the validity of the swap operation. */
faf7c678 2342
02075bb2
DN
2343void
2344swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2345{
2346 tree op0, op1;
2347 op0 = *exp0;
2348 op1 = *exp1;
3c0b6c43 2349
65ad7c63
DN
2350 /* If the operand cache is active, attempt to preserve the relative
2351 positions of these two operands in their respective immediate use
2352 lists. */
02075bb2
DN
2353 if (ssa_operands_active () && op0 != op1)
2354 {
2355 use_optype_p use0, use1, ptr;
2356 use0 = use1 = NULL;
3c0b6c43 2357
02075bb2
DN
2358 /* Find the 2 operands in the cache, if they are there. */
2359 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2360 if (USE_OP_PTR (ptr)->use == exp0)
2361 {
2362 use0 = ptr;
2363 break;
2364 }
3c0b6c43 2365
02075bb2
DN
2366 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2367 if (USE_OP_PTR (ptr)->use == exp1)
2368 {
2369 use1 = ptr;
2370 break;
2371 }
2372
2373 /* If both uses don't have operand entries, there isn't much we can do
65ad7c63 2374 at this point. Presumably we don't need to worry about it. */
02075bb2
DN
2375 if (use0 && use1)
2376 {
2377 tree *tmp = USE_OP_PTR (use1)->use;
2378 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2379 USE_OP_PTR (use0)->use = tmp;
2380 }
3c0b6c43 2381 }
02075bb2
DN
2382
2383 /* Now swap the data. */
2384 *exp0 = op1;
2385 *exp1 = op0;
3c0b6c43
DB
2386}


/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}
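
/* Illustrative sketch (hypothetical): taking the address of one field
   marks the whole object, or all of its subvars, addressable:

        struct S { int f; int g; } s;
        p = &s.f;       <-- both subvars of s (for s.f and s.g) are
                            added to addresses_taken, per PR 21407.  */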


/* Scan the immediate_use list for VAR making sure it's linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
        print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

#include "gt-tree-ssa-operands.h"