/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */

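/* As an illustration of how passes consume this cache (a sketch, not a
   call site in this file): a pass that wants to visit every real USE of
   a statement can use the operand iterator from tree-ssa-operands.h:

	tree op;
	ssa_op_iter iter;

	FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	  examine (op);

   where 'examine' stands for whatever processing the pass performs.  */
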
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def		(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of GIMPLE_MODIFY_STMT from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)

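/* For example (a sketch of how the parsing routines below apply these
   flags): for an assignment 'a = b + c', the GIMPLE_MODIFY_STMT handler
   walks the LHS with opf_def and the RHS with opf_use, roughly

	get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (stmt, 0), opf_def);
	get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (stmt, 1), opf_use);

   so 'a' is appended as a DEF operand while 'b' and 'c' become USEs.  */
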
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;

static void get_expr_operands (tree, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, or needs to be folded via fold_stmt, or anything that
   changes its physical structure, then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  A sketch of this protocol follows
   the declarations below.  */

struct scb_d
{
  /* Pointer to the statement being modified.  */
  tree *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;
DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);

/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;

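/* A minimal sketch of the protocol described above, for a hypothetical
   pass that may fold a statement in place:

	push_stmt_changes (&stmt);
	changed = fold_stmt (&stmt);
	if (changed)
	  pop_stmt_changes (&stmt);
	else
	  discard_stmt_changes (&stmt);
*/
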
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *)p);
  tree e2 = *((const tree *)q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}


/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
          > get_name_decl (VEC_index (tree, list, 1)))
        {
          /* Swap elements if in the wrong order.  */
          tree tmp = VEC_index (tree, list, 0);
          VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
          VEC_replace (tree, list, 1, tmp);
        }
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
         VEC_length (tree, list),
         sizeof (tree),
         operand_build_cmp);
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}

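/* For instance (a sketch, not a call site in this file): code that may
   run before or after SSA form exists typically guards operand updates
   with this predicate,

	if (ssa_operands_active ())
	  update_stmt (stmt);

   so that statements are only re-scanned while the cache is active.  */
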
/* VOPs are variable sized, so the free list maps "free buckets" to the
   following table:

      bucket    # operands
      ------    ----------
	 0	    1
	 1	    2
	...
	15	   16
	16	   17-24
	17	   25-32
	18	   33-40
	...
	29	  121-128

   Any VOPs larger than this are simply added to the largest bucket when
   they are freed.  */
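
/* Worked example (the arithmetic comes from vop_free_bucket_index and
   vop_free_bucket_size below): a VOP with 20 operands maps to bucket
   14 + (20 - 1) / 8 = 16, and allocations from bucket 16 are sized
   (16 - 13) * 8 = 24 slots, so one such chunk can serve any request
   of 17 to 24 operands.  */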

/* Return the number of operands used in bucket BUCKET.  */

static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
  gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}


/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks.  */
  num = 14 + (num - 1) / 8;
  if (num >= NUM_VOP_FREE_BUCKETS)
    return -1;
  else
    return num;
}


/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}


/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throwaway.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}


/* These are the sizes of the operand memory buffer which gets allocated each
   time more operand space is required.  The final value is the amount that
   is allocated every time after that.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	30
#define OP_SIZE_2	110
#define OP_SIZE_3	511

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}

/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
      scb_stack = NULL;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    {
      if (mpt)
        BITMAP_FREE (MPT_SYMBOLS (mpt));
    }

  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);
  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
               clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
               clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
               clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
               clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
               clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
               clobber_stats.static_readonly_clobbers_avoided);
    }
}

/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
        gimple_ssa_operands (cfun)->ssa_operand_mem_size
          = OP_SIZE_1 * sizeof (struct voptype_d);
      else if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
               == OP_SIZE_1 * sizeof (struct voptype_d))
        gimple_ssa_operands (cfun)->ssa_operand_mem_size
          = OP_SIZE_2 * sizeof (struct voptype_d);
      else
        gimple_ssa_operands (cfun)->ssa_operand_mem_size
          = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Go right to the maximum size if the request is too large.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
        gimple_ssa_operands (cfun)->ssa_operand_mem_size
          = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Fail if there is still not enough space.  If this many operands
         are required, first make sure there isn't a different problem
         causing this many operands.  If the decision is that this is OK,
         then we can specially allocate a buffer just for this request.  */
      gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr = (struct ssa_operand_memory_d *)
            ggc_alloc (sizeof (struct ssa_operand_memory_d)
                       + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }
  ptr = &(gimple_ssa_operands (cfun)->operand_memory
          ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}

/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
        = gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
          ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
        = gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Allocate a vop with NUM elements.  */

static inline struct voptype_d *
alloc_vop (int num)
{
  struct voptype_d *ret = NULL;
  int alloc_size = 0;

  int bucket = vop_free_bucket_index (num);
  if (bucket != -1)
    {
      /* If there is a free operand, use it.  */
      if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
        {
          ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
          gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
            gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
        }
      else
        alloc_size = vop_free_bucket_size (bucket);
    }
  else
    alloc_size = num;

  if (alloc_size > 0)
    ret = (struct voptype_d *)ssa_operand_alloc (
        sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));

  VUSE_VECT_NUM_ELEM (ret->usev) = num;
  return ret;
}

/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}

/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new;

  new = alloc_def ();
  DEF_OP_PTR (new) = op;
  last->next = new;
  new->next = NULL;
  return new;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p last)
{
  use_optype_p new;

  new = alloc_use ();
  USE_OP_PTR (new)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
  last->next = new;
  new->next = NULL;
  return new;
}


/* Return a virtual op pointer with NUM elements which are all initialized
   to OP and are linked into the immediate uses for STMT.  The new vop is
   appended after PREV.  */

static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
  voptype_p new;
  int x;

  new = alloc_vop (num);
  for (x = 0; x < num; x++)
    {
      VUSE_OP_PTR (new, x)->prev = NULL;
      SET_VUSE_OP (new, x, op);
      VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
      link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
    }

  if (prev)
    prev->next = new;
  new->next = NULL;
  return new;
}


/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = NULL_TREE;
  return new;
}


/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = op;
  return new;
}

/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots.  ROOT
   is the head of the operand list it belongs to.  */

static inline struct voptype_d *
realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
             struct voptype_d **root)
{
  unsigned int x, lim;
  tree stmt, val;
  struct voptype_d *ret, *tmp;

  if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
    return ptr;

  val = VUSE_OP (ptr, 0);
  if (TREE_CODE (val) == SSA_NAME)
    val = SSA_NAME_VAR (val);

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));

  /* Delink all the existing uses.  */
  for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
    {
      use_operand_p use_p = VUSE_OP_PTR (ptr, x);
      delink_imm_use (use_p);
    }

  /* If we want less space, simply use this one, and shrink the size.  */
  if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
    {
      VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
      return ptr;
    }

  /* It is growing.  Allocate a new one and replace the old one.  */
  ret = add_vuse_op (stmt, val, num_elem, ptr);

  /* Clear PTR and add its memory to the free list.  */
  lim = VUSE_VECT_NUM_ELEM (ptr->usev);
  memset (ptr, 0,
          sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
  add_vop_to_freelist (ptr);

  /* Now remove the old one: find its predecessor and splice in RET.  */
  if (*root == ptr)
    {
      *root = ret;
      return ret;
    }
  else
    for (tmp = *root; tmp != NULL; tmp = tmp->next)
      if (tmp->next == ptr)
        {
          tmp->next = ret;
          return ret;
        }

  /* The pointer passed in isn't in STMT's VDEF lists.  */
  gcc_unreachable ();
}


/* Reallocate the PTR vdef so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
{
  tree val, stmt;
  struct voptype_d *ret;

  val = VDEF_RESULT (ptr);
  stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
  VDEF_RESULT (ret) = val;
  return ret;
}


/* Reallocate the PTR vuse so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
{
  tree stmt;
  struct voptype_d *ret;

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
  return ret;
}

/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == num);
  }
#endif
}

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
        delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
                       (tree *) VEC_index (tree, build_uses, new_i),
                       last);

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}

/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
   STMT.  FIXME, for now VDEF operators should have a single operand
   in their RHS.  */

static inline void
finalize_ssa_vdefs (tree stmt)
{
  unsigned new_i;
  struct voptype_d new_list;
  voptype_p old_ops, ptr, last;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Set the symbols referenced by STMT.  */
  if (!bitmap_empty_p (build_stores))
    {
      if (ann->operands.stores == NULL)
        ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.stores, build_stores);
    }
  else
    BITMAP_FREE (ann->operands.stores);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VDEF_OPS (stmt);
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vdefs))
    {
      tree op = VEC_index (tree, build_vdefs, new_i);
      unsigned new_uid = get_name_decl (op);
      unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));

      /* FIXME, for now each VDEF operator should have at most one
         operand in their RHS.  */
      gcc_assert (VDEF_NUM (old_ops) == 1);

      if (old_uid == new_uid)
        {
          /* If the symbols are the same, reuse the existing operand.  */
          last->next = old_ops;
          last = old_ops;
          old_ops = old_ops->next;
          last->next = NULL;
          set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
          new_i++;
        }
      else if (old_uid < new_uid)
        {
          /* If old is less than new, old goes to the free list.  */
          voptype_p next;
          delink_imm_use (VDEF_OP_PTR (old_ops, 0));
          next = old_ops->next;
          add_vop_to_freelist (old_ops);
          old_ops = next;
        }
      else
        {
          /* This is a new operand.  */
          last = add_vdef_op (stmt, op, 1, last);
          new_i++;
        }
    }

  /* If there is anything remaining in BUILD_VDEFS, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
    last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = last)
        {
          last = ptr->next;
          delink_imm_use (VDEF_OP_PTR (ptr, 0));
          add_vop_to_freelist (ptr);
        }
    }

  /* Now set STMT's operands.  */
  VDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vdefs));
  }
#endif
}

/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i, old_i;
  voptype_p old_ops, last;
  VEC(tree,heap) *new_ops;
  stmt_ann_t ann;

  /* Set the symbols referenced by STMT.  */
  ann = stmt_ann (stmt);
  if (!bitmap_empty_p (build_loads))
    {
      if (ann->operands.loads == NULL)
        ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.loads, build_loads);
    }
  else
    BITMAP_FREE (ann->operands.loads);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  /* STMT should have at most one VUSE operator.  */
  old_ops = VUSE_OPS (stmt);
  gcc_assert (old_ops == NULL || old_ops->next == NULL);

  new_ops = NULL;
  new_i = old_i = 0;
  while (old_ops
         && old_i < VUSE_NUM (old_ops)
         && new_i < VEC_length (tree, build_vuses))
    {
      tree new_op = VEC_index (tree, build_vuses, new_i);
      tree old_op = VUSE_OP (old_ops, old_i);
      unsigned new_uid = get_name_decl (new_op);
      unsigned old_uid = get_name_decl (old_op);

      if (old_uid == new_uid)
        {
          /* If the symbols are the same, reuse the existing operand.  */
          VEC_safe_push (tree, heap, new_ops, old_op);
          new_i++;
          old_i++;
        }
      else if (old_uid < new_uid)
        {
          /* If OLD_UID is less than NEW_UID, the old operand has
             disappeared, skip to the next old operand.  */
          old_i++;
        }
      else
        {
          /* This is a new operand.  */
          VEC_safe_push (tree, heap, new_ops, new_op);
          new_i++;
        }
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
        delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
      add_vop_to_freelist (old_ops);
      VUSE_OPS (stmt) = NULL;
    }

  /* If there are any operands, instantiate a VUSE operator for STMT.  */
  if (new_ops)
    {
      tree op;
      unsigned i;

      last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);

      for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
        SET_USE (VUSE_OP_PTR (last, (int) i), op);

      VUSE_OPS (stmt) = last;
    }

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    if (VUSE_OPS (stmt))
      {
        gcc_assert (VUSE_OPS (stmt)->next == NULL);
        x = VUSE_NUM (VUSE_OPS (stmt));
      }
    else
      x = 0;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}

/* Return a new VUSE operand vector for STMT.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_vdefs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q:

	      # VUSE <a_2>
	      # a_3 = VDEF <a_2>
	      *p = *q;

     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator.  */
  num = VEC_length (tree, build_vuses);
  num_vdefs = VEC_length (tree, build_vdefs);

  if (num > 0 && num_vdefs > 0)
    for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
      {
        tree vuse;
        vuse = VEC_index (tree, build_vuses, vuse_index);
        if (TREE_CODE (vuse) != SSA_NAME)
          {
            var_ann_t ann = var_ann (vuse);
            ann->in_vuse_list = 0;
            if (ann->in_vdef_list)
              {
                VEC_ordered_remove (tree, build_vuses, vuse_index);
                continue;
              }
          }
        vuse_index++;
      }

  finalize_ssa_vuse_ops (stmt);
}

/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  unsigned i;
  tree t;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vdef_list = false;

  for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vuse_list = false;

  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);
}

/* Finalize all the build vectors, fill the new ones into STMT.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_vdefs (stmt);
  finalize_ssa_vuses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up the statement's operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_vdefs) == 0);
  gcc_assert (bitmap_empty_p (build_loads));
  gcc_assert (bitmap_empty_p (build_stores));
}

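/* A sketch of how these phases fit together (the driver appears later in
   this file; the exact sequence there may differ slightly):

	start_ssa_stmt_operands ();
	parse_ssa_operands (stmt);
	operand_build_sort_virtual (build_vuses);
	operand_build_sort_virtual (build_vdefs);
	finalize_ssa_stmt_operands (stmt);
*/
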
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
        var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vdef_list)
        return;

      ann->in_vdef_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vdefs, var);
  bitmap_set_bit (build_stores, DECL_UID (sym));
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
        var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vuse_list || ann->in_vdef_list)
        return;

      ann->in_vuse_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vuses, var);
  bitmap_set_bit (build_loads, DECL_UID (sym));
}

/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
                           HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
         const char *byte_op;
         struct asm_int_op
         {
           const char *hi;
         } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
         abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && (TREE_CODE (base) != INDIRECT_REF
               || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
           && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
           && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
           && !var_ann (alias)->is_heapvar
           /* When the struct has the may_alias attribute attached to it,
              its alias set is zero and we must not prune this access.  */
           && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && !POINTER_TYPE_P (TREE_TYPE (alias))
           && offsetgtz
           && DECL_SIZE (alias)
           && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
           && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}

/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
                     tree full_ref, HOST_WIDE_INT offset,
                     HOST_WIDE_INT size)
{
  bitmap aliases = NULL;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark the statement as having memory operands.  */
  s_ann->references_memory = true;

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a VDEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_implicit) && unmodifiable_var_p (var))
    flags &= ~opf_def;

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  if (MTAG_P (var))
    aliases = MTAG_ALIASES (var);
  if (aliases == NULL)
    {
      if (s_ann && !gimple_aliases_computed_p (cfun))
        s_ann->has_volatile_ops = true;
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_def)
        append_vdef (var);
      else
        append_vuse (var);
    }
  else
    {
      bitmap_iterator bi;
      unsigned int i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
         operands.  */
      gcc_assert (!bitmap_empty_p (aliases));

      if (flags & opf_def)
        {
          bool none_added = true;
          EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
            {
              al = referenced_var (i);
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;

              none_added = false;
              append_vdef (al);
            }

          /* Even if no aliases have been added, we still need to
             establish def-use and use-def chains, lest
             transformations think that this is not a memory
             reference.  For an example of this scenario, see
             testsuite/g++.dg/opt/cleanup1.C.  */
          if (none_added)
            append_vdef (var);
        }
      else
        {
          bool none_added = true;
          EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
            {
              al = referenced_var (i);
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;
              none_added = false;
              append_vuse (al);
            }

          /* Even if no aliases have been added, we still need to
             establish def-use and use-def chains, lest
             transformations think that this is not a memory
             reference.  For an example of this scenario, see
             testsuite/g++.dg/opt/cleanup1.C.  */
          if (none_added)
            append_vuse (var);
        }
    }
}
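
/* For example, if 'p' may point to 'a' and 'b', the loops above turn the
   store '*p = 5' into (conceptually):

	      # a_3 = VDEF <a_2>
	      # b_5 = VDEF <b_4>
	      *p = 5;

   The SSA version numbers here are illustrative only.  */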

/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p) && s_ann);

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    s_ann->has_volatile_ops = true;

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1);
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
   have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
   dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags,
                           tree full_ref,
                           HOST_WIDE_INT offset, HOST_WIDE_INT size,
                           bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  s_ann->references_memory = true;
  if (s_ann && TREE_THIS_VOLATILE (expr))
    s_ann->has_volatile_ops = true;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
          && pi->name_mem_tag)
        {
          /* PTR has its own memory tag.  Use it.  */
          add_virtual_operand (pi->name_mem_tag, s_ann, flags,
                               full_ref, offset, size);
        }
      else
        {
          /* If PTR is not an SSA_NAME or it doesn't have a name
             tag, use its symbol memory tag.  */
          var_ann_t v_ann;

          /* If we are emitting debugging dumps, display a warning if
             PTR is an SSA_NAME with no flow-sensitive alias
             information.  That means that we may need to compute
             aliasing again.  */
          if (dump_file
              && TREE_CODE (ptr) == SSA_NAME
              && pi == NULL)
            {
              fprintf (dump_file,
                       "NOTE: no flow-sensitive alias info for ");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, " in ");
              print_generic_stmt (dump_file, stmt, dump_flags);
            }

          if (TREE_CODE (ptr) == SSA_NAME)
            ptr = SSA_NAME_VAR (ptr);
          v_ann = var_ann (ptr);

          if (v_ann->symbol_mem_tag)
            add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
                                 full_ref, offset, size);
          /* Aliasing information is missing; mark statement as volatile
             so we won't optimize it out too actively.  */
          else if (s_ann && !gimple_aliases_computed_p (cfun)
                   && (flags & opf_def))
            s_ann->has_volatile_ops = true;
        }
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
         operand for it but we mark the statement volatile to prevent
         optimizations from messing things up.  */
      if (s_ann)
        s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_use);
}
1681
643519b7 1682
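/* For example, given the store

	*p_1 = x_2;

   if p_1 has a name memory tag NMT.8, the statement receives a
   virtual definition of that tag (shown as '# NMT.8_10 = VDEF <NMT.8_9>'
   in the dumps), plus real uses of p_1 and x_2.  The tag name and
   version numbers here are only illustrative.  */
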
02075bb2 1683/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
6de9cd9a
DN
1684
1685static void
02075bb2 1686get_tmr_operands (tree stmt, tree expr, int flags)
6de9cd9a 1687{
38635499 1688 tree tag, ref;
02075bb2
DN
1689 HOST_WIDE_INT offset, size, maxsize;
1690 subvar_t svars, sv;
e288e2f5 1691 stmt_ann_t s_ann = stmt_ann (stmt);
6de9cd9a 1692
38635499
DN
1693 /* This statement references memory. */
1694 s_ann->references_memory = 1;
6de9cd9a 1695
38635499
DN
1696 /* First record the real operands. */
1697 get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
1698 get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
6de9cd9a 1699
02075bb2 1700 if (TMR_SYMBOL (expr))
38635499 1701 add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
6de9cd9a 1702
38635499 1703 tag = TMR_TAG (expr);
02075bb2
DN
1704 if (!tag)
1705 {
1706 /* Something weird, so ensure that we will be careful. */
38635499 1707 s_ann->has_volatile_ops = true;
310de761 1708 return;
02075bb2 1709 }
44de5aeb 1710
02075bb2
DN
1711 if (DECL_P (tag))
1712 {
1713 get_expr_operands (stmt, &tag, flags);
1714 return;
1715 }
643519b7 1716
02075bb2
DN
1717 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1718 gcc_assert (ref != NULL_TREE);
1719 svars = get_subvars_for_var (ref);
1720 for (sv = svars; sv; sv = sv->next)
1721 {
1722 bool exact;
38635499 1723
02075bb2 1724 if (overlap_subvar (offset, maxsize, sv->var, &exact))
38635499 1725 add_stmt_operand (&sv->var, s_ann, flags);
02075bb2
DN
1726 }
1727}
643519b7 1728
7ccf35ed 1729
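/* A TARGET_MEM_REF describes a memory access in a form close to the
   target's addressing modes, roughly

	TMR_SYMBOL + TMR_BASE + TMR_INDEX * TMR_STEP + TMR_OFFSET

   so, for instance, an access a[i_1] lowered by ivopts might carry
   symbol 'a' and index i_1 (an illustrative shape, not a fixed rule).
   Only TMR_BASE and TMR_INDEX can contain SSA names and hence real
   operands; the memory side effects are summarized by TMR_TAG.  */
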
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are neither const, pure nor noreturn may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a VDEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
					    DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
						  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
	 clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_stmt_operand (&var, s_ann, opf_use);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_stmt_operand (&var, s_ann, opf_use);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1);
    }
}

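/* As an illustration (names and versions hypothetical), a call

	foo ();

   in a function where 'g' is a call-clobbered global typically gets a
   virtual definition '# g_4 = VDEF <g_3>', unless IPA reference
   analysis proved that foo does not write g, in which case the
   operand weakens to '# VUSE <g_3>' or disappears entirely.  */
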
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_use);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
    }
}

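/* Thus a call to a pure function, say

	x_1 = bar (y_2);

   where bar only reads global memory, is given VUSEs for the
   call-clobbered variables it may read but no VDEFs, so stores around
   the call can still be optimized.  The statement shape here is only
   illustrative.  */
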
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  int call_flags = call_expr_flags (expr);
  int i, nargs;
  stmt_ann_t ann = stmt_ann (stmt);

  ann->references_memory = true;

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (gimple_aliases_computed_p (cfun)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the call itself: the callee address, the arguments
     and the static chain.  */
  get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);

  get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
}

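/* A pass that needs to know whether a statement contains a call with
   memory side effects can combine the same helpers used above; a
   minimal sketch:

	tree call = get_call_expr_in (stmt);
	if (call && !(call_expr_flags (call) & (ECF_CONST | ECF_PURE)))
	  mark_stmt_necessary (stmt);

   where mark_stmt_necessary stands for whatever action the caller
   takes; it is not a function defined here.  */
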
/* Scan operands in the ASM_EXPR statement STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann;
  int i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  tree link;

  s_ann = stmt_ann (stmt);
  noutputs = list_length (ASM_OUTPUTS (stmt));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	s_ann->references_memory = true;

	EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }

	EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so we
	       don't need the original to be added to the clobber ops,
	       but the original *will* be in this list because we keep
	       the addressability of the original variable up-to-date
	       to avoid confusing the back-end.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }
	break;
      }
}

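/* Hence an asm with a "memory" clobber, e.g.

	__asm__ __volatile__ ("" : : : "memory");

   acts as a full optimization barrier at the tree level: it receives
   implicit VDEFs for every call-clobbered and addressable symbol, so
   no memory access can be moved across it.  */
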
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_stmt_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);

  /* For the LHS, use a regular definition (opf_def) for GIMPLE
     registers.  If the LHS is a store to memory, we will need
     a preserving definition (VDEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with VDEF operators.

     We used to distinguish between preserving and killing definitions.
     We always emit preserving definitions now.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
}

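/* So for an aggregate store such as

	s.f = x_1;

   where s has no computed subvars, the statement gets a real use of
   x_1 plus a preserving definition '# s_3 = VDEF <s_2>': the new
   version s_3 still holds the unmodified fields of s_2.  The version
   numbers are illustrative.  */
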
02075bb2 2022/* Recursively scan the expression pointed to by EXPR_P in statement
65ad7c63
DN
2023 STMT. FLAGS is one of the OPF_* constants modifying how to
2024 interpret the operands found. */
02075bb2
DN
2025
2026static void
2027get_expr_operands (tree stmt, tree *expr_p, int flags)
2028{
2029 enum tree_code code;
2030 enum tree_code_class class;
2031 tree expr = *expr_p;
2032 stmt_ann_t s_ann = stmt_ann (stmt);
2033
2034 if (expr == NULL)
2035 return;
2036
2037 code = TREE_CODE (expr);
2038 class = TREE_CODE_CLASS (code);
2039
2040 switch (code)
2041 {
2042 case ADDR_EXPR:
2043 /* Taking the address of a variable does not represent a
2044 reference to it, but the fact that the statement takes its
2045 address will be of interest to some passes (e.g. alias
2046 resolution). */
2047 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
2048
2049 /* If the address is invariant, there may be no interesting
2050 variable references inside. */
2051 if (is_gimple_min_invariant (expr))
2052 return;
2053
2054 /* Otherwise, there may be variables referenced inside but there
2055 should be no VUSEs created, since the referenced objects are
2056 not really accessed. The only operands that we should find
2057 here are ARRAY_REF indices which will always be real operands
2058 (GIMPLE does not allow non-registers as array indices). */
2059 flags |= opf_no_vops;
2060 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2061 return;
2062
2063 case SSA_NAME:
2064 case STRUCT_FIELD_TAG:
18cd8a03 2065 case SYMBOL_MEMORY_TAG:
02075bb2
DN
2066 case NAME_MEMORY_TAG:
2067 add_stmt_operand (expr_p, s_ann, flags);
2068 return;
2069
2070 case VAR_DECL:
2071 case PARM_DECL:
2072 case RESULT_DECL:
2073 {
2074 subvar_t svars;
2075
65ad7c63 2076 /* Add the subvars for a variable, if it has subvars, to DEFS
02075bb2
DN
2077 or USES. Otherwise, add the variable itself. Whether it
2078 goes to USES or DEFS depends on the operand flags. */
2079 if (var_can_have_subvars (expr)
2080 && (svars = get_subvars_for_var (expr)))
2081 {
2082 subvar_t sv;
2083 for (sv = svars; sv; sv = sv->next)
2084 add_stmt_operand (&sv->var, s_ann, flags);
2085 }
2086 else
2087 add_stmt_operand (expr_p, s_ann, flags);
2088
2089 return;
2090 }
2091
2092 case MISALIGNED_INDIRECT_REF:
2093 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2094 /* fall through */
2095
2096 case ALIGN_INDIRECT_REF:
2097 case INDIRECT_REF:
65ad7c63 2098 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
02075bb2
DN
2099 return;
2100
2101 case TARGET_MEM_REF:
2102 get_tmr_operands (stmt, expr, flags);
2103 return;
2104
02075bb2 2105 case ARRAY_REF:
65ad7c63 2106 case ARRAY_RANGE_REF:
02075bb2
DN
2107 case COMPONENT_REF:
2108 case REALPART_EXPR:
2109 case IMAGPART_EXPR:
2110 {
c75ab022 2111 tree ref;
6bec9271 2112 HOST_WIDE_INT offset, size, maxsize;
758cf3f2 2113 bool none = true;
c75ab022 2114
643519b7
DN
2115 /* This component reference becomes an access to all of the
2116 subvariables it can touch, if we can determine that, but
2117 *NOT* the real one. If we can't determine which fields we
2118 could touch, the recursion will eventually get to a
2119 variable and add *all* of its subvars, or whatever is the
2120 minimum correct subset. */
6bec9271
RG
2121 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2122 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
643519b7 2123 {
c75ab022 2124 subvar_t sv;
643519b7
DN
2125 subvar_t svars = get_subvars_for_var (ref);
2126
c75ab022
DB
2127 for (sv = svars; sv; sv = sv->next)
2128 {
2129 bool exact;
643519b7 2130
3c0b6c43 2131 if (overlap_subvar (offset, maxsize, sv->var, &exact))
c75ab022 2132 {
98b6d477 2133 int subvar_flags = flags;
758cf3f2 2134 none = false;
7fac66d4 2135 add_stmt_operand (&sv->var, s_ann, subvar_flags);
c75ab022
DB
2136 }
2137 }
643519b7 2138
758cf3f2
RG
2139 if (!none)
2140 flags |= opf_no_vops;
c75ab022 2141 }
3c0b6c43
DB
2142 else if (TREE_CODE (ref) == INDIRECT_REF)
2143 {
65ad7c63
DN
2144 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
2145 maxsize, false);
3c0b6c43
DB
2146 flags |= opf_no_vops;
2147 }
758cf3f2
RG
2148
2149 /* Even if we found subvars above we need to ensure to see
2150 immediate uses for d in s.a[d]. In case of s.a having
65ad7c63 2151 a subvar or we would miss it otherwise. */
38635499 2152 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
c75ab022
DB
2153
2154 if (code == COMPONENT_REF)
305a1321 2155 {
707db096 2156 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
305a1321 2157 s_ann->has_volatile_ops = true;
38635499 2158 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
305a1321 2159 }
65ad7c63 2160 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
a916f21d 2161 {
38635499
DN
2162 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2163 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2164 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
a916f21d 2165 }
643519b7 2166
c75ab022
DB
2167 return;
2168 }
643519b7 2169
d25cee4d 2170 case WITH_SIZE_EXPR:
0e28378a 2171 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
d25cee4d 2172 and an rvalue reference to its second argument. */
38635499 2173 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
1a24f92f 2174 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
d25cee4d
RH
2175 return;
2176
310de761 2177 case CALL_EXPR:
1a24f92f 2178 get_call_expr_operands (stmt, expr);
6de9cd9a 2179 return;
6de9cd9a 2180
40923b20 2181 case COND_EXPR:
ad9f20cb 2182 case VEC_COND_EXPR:
38635499
DN
2183 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
2184 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2185 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
40923b20
DP
2186 return;
2187
07beea0d
AH
2188 case GIMPLE_MODIFY_STMT:
2189 get_modify_stmt_operands (stmt, expr);
65ad7c63 2190 return;
6de9cd9a 2191
7b48e1e0
RH
2192 case CONSTRUCTOR:
2193 {
2194 /* General aggregate CONSTRUCTORs have been decomposed, but they
2195 are still in use as the COMPLEX_EXPR equivalent for vectors. */
4038c495
GB
2196 constructor_elt *ce;
2197 unsigned HOST_WIDE_INT idx;
7b48e1e0 2198
4038c495
GB
2199 for (idx = 0;
2200 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2201 idx++)
38635499 2202 get_expr_operands (stmt, &ce->value, opf_use);
7b48e1e0
RH
2203
2204 return;
2205 }
2206
310de761 2207 case BIT_FIELD_REF:
65ad7c63 2208 case TRUTH_NOT_EXPR:
4626c433 2209 case VIEW_CONVERT_EXPR:
310de761 2210 do_unary:
1a24f92f 2211 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
6de9cd9a 2212 return;
6de9cd9a 2213
310de761
RH
2214 case TRUTH_AND_EXPR:
2215 case TRUTH_OR_EXPR:
2216 case TRUTH_XOR_EXPR:
2217 case COMPOUND_EXPR:
2218 case OBJ_TYPE_REF:
0bca51f0 2219 case ASSERT_EXPR:
310de761
RH
2220 do_binary:
2221 {
1a24f92f
AM
2222 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2223 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
310de761
RH
2224 return;
2225 }
2226
20f06221 2227 case DOT_PROD_EXPR:
7ccf35ed
DN
2228 case REALIGN_LOAD_EXPR:
2229 {
2230 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2231 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2232 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2233 return;
2234 }
2235
310de761
RH
2236 case BLOCK:
2237 case FUNCTION_DECL:
2238 case EXC_PTR_EXPR:
2239 case FILTER_EXPR:
2240 case LABEL_DECL:
243cdfa8 2241 case CONST_DECL:
50674e96
DN
2242 case OMP_PARALLEL:
2243 case OMP_SECTIONS:
2244 case OMP_FOR:
50674e96
DN
2245 case OMP_SINGLE:
2246 case OMP_MASTER:
2247 case OMP_ORDERED:
2248 case OMP_CRITICAL:
777f7f9a
RH
2249 case OMP_RETURN:
2250 case OMP_CONTINUE:
02075bb2 2251 /* Expressions that make no memory references. */
310de761 2252 return;
02075bb2
DN
2253
2254 default:
2255 if (class == tcc_unary)
2256 goto do_unary;
2257 if (class == tcc_binary || class == tcc_comparison)
2258 goto do_binary;
2259 if (class == tcc_constant || class == tcc_type)
2260 return;
643519b7 2261 }
310de761 2262
02075bb2
DN
2263 /* If we get here, something has gone wrong. */
2264#ifdef ENABLE_CHECKING
2265 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2266 debug_tree (expr);
2267 fputs ("\n", stderr);
2268#endif
2269 gcc_unreachable ();
310de761
RH
2270}
2271
643519b7 2272
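/* Note the effect of the ADDR_EXPR case above: in

	p_1 = &s.data[i_2];

   the statement records that the address of 's' is taken and adds a
   real use of i_2, but no VUSE of 's' itself, since the memory is not
   actually read.  The names are illustrative.  */
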
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
	 operand pointer (which may only happen for USE operands), we
	 will fail in add_stmt_operand.  This default will handle
	 statements like empty statements, or CALL_EXPRs that may
	 appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_use);
      break;
    }
}

02075bb2 2334/* Create an operands cache for STMT. */
310de761
RH
2335
2336static void
02075bb2 2337build_ssa_operands (tree stmt)
310de761 2338{
02075bb2
DN
2339 stmt_ann_t ann = get_stmt_ann (stmt);
2340
38635499
DN
2341 /* Initially assume that the statement has no volatile operands and
2342 makes no memory references. */
2343 ann->has_volatile_ops = false;
2344 ann->references_memory = false;
d586d6d1
JH
2345 /* Just clear the bitmap so we don't end up reallocating it over and over. */
2346 if (ann->addresses_taken)
2347 bitmap_clear (ann->addresses_taken);
310de761 2348
02075bb2 2349 start_ssa_stmt_operands ();
02075bb2
DN
2350 parse_ssa_operands (stmt);
2351 operand_build_sort_virtual (build_vuses);
38635499 2352 operand_build_sort_virtual (build_vdefs);
02075bb2 2353 finalize_ssa_stmt_operands (stmt);
38635499 2354
d586d6d1
JH
2355 if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
2356 ann->addresses_taken = NULL;
38635499
DN
2357 /* For added safety, assume that statements with volatile operands
2358 also reference memory. */
2359 if (ann->has_volatile_ops)
2360 ann->references_memory = true;
02075bb2 2361}
e288e2f5 2362
e288e2f5 2363
02075bb2 2364/* Free any operands vectors in OPS. */
65ad7c63 2365
02075bb2
DN
2366void
2367free_ssa_operands (stmt_operands_p ops)
2368{
2369 ops->def_ops = NULL;
2370 ops->use_ops = NULL;
38635499 2371 ops->vdef_ops = NULL;
02075bb2 2372 ops->vuse_ops = NULL;
38635499
DN
2373 BITMAP_FREE (ops->loads);
2374 BITMAP_FREE (ops->stores);
310de761
RH
2375}
2376
3c0b6c43 2377
2434ab1d 2378/* Get the operands of statement STMT. */
643519b7 2379
02075bb2
DN
2380void
2381update_stmt_operands (tree stmt)
2382{
2383 stmt_ann_t ann = get_stmt_ann (stmt);
3c0b6c43 2384
65ad7c63
DN
2385 /* If update_stmt_operands is called before SSA is initialized, do
2386 nothing. */
02075bb2
DN
2387 if (!ssa_operands_active ())
2388 return;
943261d7 2389
02075bb2
DN
2390 /* The optimizers cannot handle statements that are nothing but a
2391 _DECL. This indicates a bug in the gimplifier. */
2392 gcc_assert (!SSA_VAR_P (stmt));
6de9cd9a 2393
02075bb2 2394 timevar_push (TV_TREE_OPS);
943261d7 2395
38635499 2396 gcc_assert (ann->modified);
02075bb2 2397 build_ssa_operands (stmt);
02075bb2 2398 ann->modified = 0;
6de9cd9a 2399
02075bb2
DN
2400 timevar_pop (TV_TREE_OPS);
2401}
faf7c678 2402
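/* Passes normally do not call update_stmt_operands directly; after
   modifying a statement in place they call update_stmt (defined in
   tree-flow-inline.h), which marks the statement modified and ends up
   here.  A minimal sketch, assuming NEW_RHS is a valid GIMPLE value:

	GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs;
	update_stmt (stmt);
*/
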
/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  unsigned int i, n;
  voptype_p src_vuses, dest_vuses;
  voptype_p src_vdefs, dest_vdefs;
  struct voptype_d vuse;
  struct voptype_d vdef;
  stmt_ann_t dest_ann;

  VDEF_OPS (dest) = NULL;
  VUSE_OPS (dest) = NULL;

  dest_ann = get_stmt_ann (dest);
  BITMAP_FREE (dest_ann->operands.loads);
  BITMAP_FREE (dest_ann->operands.stores);

  if (LOADED_SYMS (src))
    {
      dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
    }

  if (STORED_SYMS (src))
    {
      dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
    }

  /* Copy all the VUSE operators and corresponding operands.  */
  dest_vuses = &vuse;
  for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
    {
      n = VUSE_NUM (src_vuses);
      dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
      for (i = 0; i < n; i++)
	SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));

      if (VUSE_OPS (dest) == NULL)
	VUSE_OPS (dest) = vuse.next;
    }

  /* Copy all the VDEF operators and corresponding operands.  */
  dest_vdefs = &vdef;
  for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
    {
      n = VUSE_NUM (src_vdefs);
      dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
      VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
      for (i = 0; i < n; i++)
	SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));

      if (VDEF_OPS (dest) == NULL)
	VDEF_OPS (dest) = vdef.next;
    }
}

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial statement which looks like a load from the store;
   this can be used to eliminate redundant loads.  OLD_STMT is the store
   statement and NEW_STMT is the new load, which represents a load of the
   values stored.  */

void
create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
{
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned i;

  get_stmt_ann (new_stmt);

  /* Process NEW_STMT looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vuse_list = false;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vdef_list = false;

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the VDEF result operand on the new statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
    append_vuse (op);

  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* If we did not find operand entries for both uses, there isn't
	 much we can do at this point.  Presumably we don't need to
	 worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}

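/* A typical caller is a pass canonicalizing a commutative operation;
   a minimal sketch, assuming RHS is the expression on the right-hand
   side of STMT:

	if (tree_swap_operands_p (TREE_OPERAND (rhs, 0),
				  TREE_OPERAND (rhs, 1), false))
	  swap_tree_operands (stmt,
			      &TREE_OPERAND (rhs, 0),
			      &TREE_OPERAND (rhs, 1));
*/
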
/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
	*addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    {
	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
	      TREE_ADDRESSABLE (sv->var) = 1;
	    }
	}
      else
	{
	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
	  TREE_ADDRESSABLE (var) = 1;
	}
    }
}

/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error;	/* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
      else
	print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump the immediate uses of every SSA name to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}

/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

/* Create a new change buffer for the statement pointed to by STMT_P and
   push the buffer into SCB_STACK.  Each change buffer records state
   information needed to determine what changed in the statement.
   Mainly, this keeps track of symbols that may need to be put into
   SSA form, SSA name replacements and other information needed to
   keep the SSA form up to date.  */

void
push_stmt_changes (tree *stmt_p)
{
  tree stmt;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = xmalloc (sizeof *buf);
  memset (buf, 0, sizeof *buf);

  buf->stmt_p = stmt_p;

  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (buf->loads == NULL)
	    buf->loads = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (buf->loads, DECL_UID (sym));
	}

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (buf->stores == NULL)
	    buf->stores = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (buf->stores, DECL_UID (sym));
	}
    }

  VEC_safe_push (scb_t, heap, scb_stack, buf);
}

/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
   for renaming.  The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1).  */

static void
mark_difference_for_renaming (bitmap s1, bitmap s2)
{
  if (s1 == NULL && s2 == NULL)
    return;

  if (s1 && s2 == NULL)
    mark_set_for_renaming (s1);
  else if (s1 == NULL && s2)
    mark_set_for_renaming (s2);
  else if (!bitmap_equal_p (s1, s2))
    {
      bitmap t1 = BITMAP_ALLOC (NULL);
      bitmap t2 = BITMAP_ALLOC (NULL);

      bitmap_and_compl (t1, s1, s2);
      bitmap_and_compl (t2, s2, s1);
      bitmap_ior_into (t1, t2);
      mark_set_for_renaming (t1);

      BITMAP_FREE (t1);
      BITMAP_FREE (t2);
    }
}

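/* For instance, with S1 = { a, b } and S2 = { b, c }, the symmetric
   difference (S1 & ~S2) | (S2 & ~S1) is { a, c }, so only 'a' and 'c'
   are marked for renaming; 'b' appears on both sides and its SSA web
   is unaffected.  */
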
/* Pop the top SCB from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  */

void
pop_stmt_changes (tree *stmt_p)
{
  tree op, stmt;
  ssa_op_iter iter;
  bitmap loads, stores;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  */
  update_stmt (stmt);

  /* Determine whether any memory symbols need to be renamed.  If the
     sets of loads and stores are different after the statement is
     modified, then the affected symbols need to be renamed.

     Note that it may be possible for the statement to not reference
     memory anymore, but we still need to act on the differences in
     the sets of symbols.  */
  loads = stores = NULL;
  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (loads == NULL)
	    loads = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (loads, DECL_UID (sym));
	}

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
	{
	  tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
	  if (stores == NULL)
	    stores = BITMAP_ALLOC (NULL);
	  bitmap_set_bit (stores, DECL_UID (sym));
	}
    }

  /* If LOADS is different from BUF->LOADS, the affected
     symbols need to be marked for renaming.  */
  mark_difference_for_renaming (loads, buf->loads);

  /* Similarly for STORES and BUF->STORES.  */
  mark_difference_for_renaming (stores, buf->stores);

  /* Mark all the naked GIMPLE register operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
    if (DECL_P (op))
      mark_sym_for_renaming (op);

  /* FIXME, need to add more finalizers here.  Cleanup EH info,
     recompute invariants for address expressions, add
     SSA replacement mappings, etc.  For instance, given
     testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
     the form:

	# SMT.4_20 = VDEF <SMT.4_16>
	D.1576_11 = 1.0e+0;

     So, the VDEF will disappear, but instead of marking SMT.4 for
     renaming it would be far more efficient to establish a
     replacement mapping that would replace every reference of
     SMT.4_20 with SMT.4_16.  */

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  BITMAP_FREE (loads);
  BITMAP_FREE (stores);
  buf->stmt_p = NULL;
  free (buf);
}

/* Discard the topmost change buffer from SCB_STACK.  This is useful
   when the caller realizes that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.  */

void
discard_stmt_changes (tree *stmt_p)
{
  scb_t buf;
  tree stmt;

  /* It makes no sense to keep track of PHI nodes.  */
  stmt = *stmt_p;
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  buf->stmt_p = NULL;
  free (buf);
}

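/* The three functions above are meant to bracket in-place statement
   modifications; a minimal sketch of the usual pattern:

	push_stmt_changes (&stmt);
	changed = fold_stmt_inplace (stmt);
	if (changed)
	  pop_stmt_changes (&stmt);
	else
	  discard_stmt_changes (&stmt);

   where fold_stmt_inplace stands in for whatever transformation the
   caller applies.  */
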
/* Returns true if statement STMT may access memory.  */

bool
stmt_references_memory_p (tree stmt)
{
  if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
    return false;

  return stmt_ann (stmt)->references_memory;
}

/* Return the memory partition tag (MPT) associated with memory
   symbol SYM.  From a correctness standpoint, memory partitions can
   be assigned in any arbitrary fashion as long as this rule is
   observed: Given two memory partitions MPT.i and MPT.j, they must
   not contain symbols in common.

   Memory partitions are used when putting the program into Memory-SSA
   form.  In particular, in Memory-SSA PHI nodes are not computed for
   individual memory symbols.  They are computed for memory
   partitions.  This reduces the number of PHI nodes in the SSA graph
   at the expense of precision (i.e., it makes unrelated stores affect
   each other).

   However, it is possible to increase precision by changing this
   partitioning scheme.  For instance, if the partitioning scheme is
   such that get_mpt_for is the identity function (that is,
   get_mpt_for (s) = s), this will result in ultimate precision at the
   expense of huge SSA webs.

   At the other extreme, a partitioning scheme that groups all the
   symbols in the same set results in minimal SSA webs and almost
   total loss of precision.  */

tree
get_mpt_for (tree sym)
{
  tree mpt;

  /* Don't create a new tag unnecessarily.  */
  mpt = memory_partition (sym);
  if (mpt == NULL_TREE)
    {
      mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
      TREE_ADDRESSABLE (mpt) = 0;
      MTAG_GLOBAL (mpt) = 1;
      add_referenced_var (mpt);
      VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
      MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&operands_bitmap_obstack);
      set_memory_partition (sym, mpt);
    }

  return mpt;
}

/* Dump memory partition information to FILE.  */

void
dump_memory_partitions (FILE *file)
{
  unsigned i, npart;
  unsigned long nsyms;
  tree mpt;

  fprintf (file, "\nMemory partitions\n\n");
  for (i = 0, npart = 0, nsyms = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
       i++)
    {
      if (mpt)
	{
	  bitmap syms = MPT_SYMBOLS (mpt);
	  unsigned long n = bitmap_count_bits (syms);

	  fprintf (file, "#%u: ", i);
	  print_generic_expr (file, mpt, 0);
	  fprintf (file, ": %lu elements: ", n);
	  dump_decl_set (file, syms);
	  npart++;
	  nsyms += n;
	}
    }

  fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
}

/* Dump memory partition information to stderr.  */

void
debug_memory_partitions (void)
{
  dump_memory_partitions (stderr);
}