/* SSA operands management for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"

/* Flags to describe operand properties in get_stmt_operands and helpers.  */

/* By default, operands are loaded.  */
#define opf_none 0
/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def (1 << 0)

/* Operand is the target of an assignment expression that overwrites the
   whole object (a killing definition).  */
#define opf_kill_def (1 << 2)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

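/* A minimal usage sketch, mirroring the MODIFY_EXPR handling further
   below: a store through a component reference modifies only part of
   the object, so it is scanned with opf_is_def alone, while a store to
   a whole variable may use the killing form:

     get_expr_operands (stmt, &lhs_p, opf_is_def, &prev_vops);
     get_expr_operands (stmt, &lhs_p, opf_is_def | opf_kill_def,
                        &prev_vops);

   Here 'lhs_p' and 'prev_vops' are placeholders for a caller's operand
   pointer and saved virtual operands.  */
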
/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;

#ifdef ENABLE_CHECKING
tree check_build_stmt;
#endif

typedef struct voperands_d
{
  v_may_def_optype v_may_def_ops;
  vuse_optype vuse_ops;
  v_must_def_optype v_must_def_ops;
} *voperands_t;

static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int, voperands_t);
static inline void append_def (tree *, tree);
static inline void append_use (tree *, tree);
static void append_v_may_def (tree, tree, voperands_t);
static void append_v_must_def (tree, tree, voperands_t);
static void add_call_clobber_ops (tree, voperands_t);
static void add_call_read_ops (tree, voperands_t);
static void add_stmt_operand (tree *, tree, int, voperands_t);

/* Return a vector of contiguous memory with room for NUM def operands.  */

static inline def_optype
allocate_def_optype (unsigned num)
{
  def_optype def_ops;
  unsigned size;
  size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_ops = ggc_alloc (size);
  def_ops->num_defs = num;
  return def_ops;
}

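/* Note: the allocators here rely on the trailing-array idiom: each
   *_optype_d struct ends in a one-element array, so a vector of NUM
   entries needs the struct size plus NUM - 1 extra elements.  The
   v_may_def allocator requests NUM * 2 slots because every V_MAY_DEF
   is stored as a result/use pair.  */
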
static inline use_optype
allocate_use_optype (unsigned num)
{
  use_optype use_ops;
  unsigned size;
  size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
  use_ops = ggc_alloc (size);
  use_ops->num_uses = num;
  return use_ops;
}

static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  v_may_def_optype v_may_def_ops;
  unsigned size;
  size = sizeof (struct v_may_def_optype_d) + sizeof (tree) * ((num * 2) - 1);
  v_may_def_ops = ggc_alloc (size);
  v_may_def_ops->num_v_may_defs = num;
  return v_may_def_ops;
}

static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  vuse_optype vuse_ops;
  unsigned size;
  size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
  vuse_ops = ggc_alloc (size);
  vuse_ops->num_vuses = num;
  return vuse_ops;
}

static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  v_must_def_optype v_must_def_ops;
  unsigned size;
  size = sizeof (struct v_must_def_optype_d) + sizeof (tree *) * (num - 1);
  v_must_def_ops = ggc_alloc (size);
  v_must_def_ops->num_v_must_defs = num;
  return v_must_def_ops;
}

static inline void
free_uses (use_optype *uses, bool dealloc)
{
  if (*uses)
    {
      if (dealloc)
        ggc_free (*uses);
      *uses = NULL;
    }
}

static inline void
free_defs (def_optype *defs, bool dealloc)
{
  if (*defs)
    {
      if (dealloc)
        ggc_free (*defs);
      *defs = NULL;
    }
}

static inline void
free_vuses (vuse_optype *vuses, bool dealloc)
{
  if (*vuses)
    {
      if (dealloc)
        ggc_free (*vuses);
      *vuses = NULL;
    }
}

static inline void
free_v_may_defs (v_may_def_optype *v_may_defs, bool dealloc)
{
  if (*v_may_defs)
    {
      if (dealloc)
        ggc_free (*v_may_defs);
      *v_may_defs = NULL;
    }
}

static inline void
free_v_must_defs (v_must_def_optype *v_must_defs, bool dealloc)
{
  if (*v_must_defs)
    {
      if (dealloc)
        ggc_free (*v_must_defs);
      *v_must_defs = NULL;
    }
}

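/* The DEALLOC flag in the helpers above lets a caller detach an operand
   vector from a statement without returning its memory to the garbage
   collector; get_stmt_operands uses this to keep the previous virtual
   operands alive in PREV_VOPS while the statement is rescanned.  */
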
void
remove_vuses (tree stmt)
{
  stmt_ann_t ann;

  ann = stmt_ann (stmt);
  if (ann)
    free_vuses (&(ann->vuse_ops), true);
}

void
remove_v_may_defs (tree stmt)
{
  stmt_ann_t ann;

  ann = stmt_ann (stmt);
  if (ann)
    free_v_may_defs (&(ann->v_may_def_ops), true);
}

void
remove_v_must_defs (tree stmt)
{
  stmt_ann_t ann;

  ann = stmt_ann (stmt);
  if (ann)
    free_v_must_defs (&(ann->v_must_def_ops), true);
}

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}

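/* A minimal driver sketch, assuming the CFG iteration helpers of this
   era of the tree optimizers (FOR_EACH_BB and block_stmt_iterator):

     basic_block bb;
     block_stmt_iterator bsi;

     init_ssa_operands ();
     FOR_EACH_BB (bb)
       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
         get_stmt_operands (bsi_stmt (bsi));
     fini_ssa_operands ();  */
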
void
fini_ssa_operands (void)
{
}

static void
finalize_ssa_defs (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  def_optype def_ops;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  /* There should only be a single real definition per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  def_ops = allocate_def_optype (num);
  for (x = 0; x < num; x++)
    def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
  VARRAY_POP_ALL (build_defs);

  ann = stmt_ann (stmt);
  ann->def_ops = def_ops;
}

static void
finalize_ssa_uses (tree stmt)
{
  unsigned num, x;
  use_optype use_ops;
  stmt_ann_t ann;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
        abort ();
  }
#endif

  use_ops = allocate_use_optype (num);
  for (x = 0; x < num; x++)
    use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
  VARRAY_POP_ALL (build_uses);

  ann = stmt_ann (stmt);
  ann->use_ops = use_ops;
}

static void
finalize_ssa_v_may_defs (tree stmt)
{
  unsigned num, x;
  v_may_def_optype v_may_def_ops;
  stmt_ann_t ann;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  /* V_MAY_DEFs must be entered in result/use pairs.  */
  if (num % 2 != 0)
    abort ();
#endif

  v_may_def_ops = allocate_v_may_def_optype (num / 2);
  for (x = 0; x < num; x++)
    v_may_def_ops->v_may_defs[x] = VARRAY_TREE (build_v_may_defs, x);
  VARRAY_CLEAR (build_v_may_defs);

  ann = stmt_ann (stmt);
  ann->v_may_def_ops = v_may_def_ops;
}

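/* Layout note: BUILD_V_MAY_DEFS interleaves entries, so after
   finalization v_may_defs[2i] holds the result and v_may_defs[2i + 1]
   the use of the i-th V_MAY_DEF; the V_MAY_DEF_RESULT and V_MAY_DEF_OP
   accessors hide this pairing from clients.  */
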
static inline void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  vuse_optype vuse_ops;
  v_may_def_optype v_may_defs;

#ifdef ENABLE_CHECKING
  if (VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0)
    {
      fprintf (stderr, "Please finalize V_MAY_DEFs before finalizing VUSEs.\n");
      abort ();
    }
#endif

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    return;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

        # VUSE <a_2>
        # a_3 = V_MAY_DEF <a_2>
        a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  ann = stmt_ann (stmt);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (NUM_V_MAY_DEFS (v_may_defs) > 0)
    {
      size_t i, j;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
        {
          bool found = false;
          for (j = 0; j < NUM_V_MAY_DEFS (v_may_defs); j++)
            {
              tree vuse_var, v_may_def_var;
              tree vuse = VARRAY_TREE (build_vuses, i);
              tree v_may_def = V_MAY_DEF_OP (v_may_defs, j);

              if (TREE_CODE (vuse) == SSA_NAME)
                vuse_var = SSA_NAME_VAR (vuse);
              else
                vuse_var = vuse;

              if (TREE_CODE (v_may_def) == SSA_NAME)
                v_may_def_var = SSA_NAME_VAR (v_may_def);
              else
                v_may_def_var = v_may_def;

              if (vuse_var == v_may_def_var)
                {
                  found = true;
                  break;
                }
            }

          /* If we found a useless VUSE operand, remove it from the
             operand array by replacing it with the last active element
             in the operand array (unless the useless VUSE was the last
             operand, in which case it is simply removed).  */
          if (found)
            {
              if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
                {
                  VARRAY_TREE (build_vuses, i)
                    = VARRAY_TREE (build_vuses,
                                   VARRAY_ACTIVE_SIZE (build_vuses) - 1);
                }
              VARRAY_POP (build_vuses);

              /* We want to rescan the element at this index, unless
                 this was the last element, in which case the loop
                 terminates.  */
              i--;
            }
        }
    }

  /* The pruning above may have removed all VUSEs.  */
  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    return;

  vuse_ops = allocate_vuse_optype (num);
  for (x = 0; x < num; x++)
    vuse_ops->vuses[x] = VARRAY_TREE (build_vuses, x);
  VARRAY_CLEAR (build_vuses);
  ann->vuse_ops = vuse_ops;
}

static void
finalize_ssa_v_must_defs (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  v_must_def_optype v_must_def_ops;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return;

#ifdef ENABLE_CHECKING
  /* There should only be a single V_MUST_DEF per assignment.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
    abort ();
#endif

  v_must_def_ops = allocate_v_must_def_optype (num);
  for (x = 0; x < num; x++)
    v_must_def_ops->v_must_defs[x] = VARRAY_TREE (build_v_must_defs, x);
  VARRAY_POP_ALL (build_v_must_defs);

  ann = stmt_ann (stmt);
  ann->v_must_def_ops = v_must_def_ops;
}

extern void
finalize_ssa_stmt_operands (tree stmt)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt == NULL)
    abort ();
#endif

  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);

#ifdef ENABLE_CHECKING
  check_build_stmt = NULL;
#endif
}


extern void
verify_start_operands (tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (VARRAY_ACTIVE_SIZE (build_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_uses) > 0
      || VARRAY_ACTIVE_SIZE (build_vuses) > 0
      || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
    abort ();
  if (check_build_stmt != NULL)
    abort ();
  check_build_stmt = stmt;
#endif
}


/* Add DEF_P to the list of pointers to operands defined by STMT.  */

static inline void
append_def (tree *def_p, tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort ();
#endif
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands used by STMT.  */

static inline void
append_use (tree *use_p, tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort ();
#endif
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}


/* Add a new virtual def for variable VAR to statement STMT.  If PREV_VOPS
   is not NULL, the existing entries are preserved and no new entries are
   added here.  This is done to preserve the SSA numbering of virtual
   operands.  */

static void
append_v_may_def (tree var, tree stmt, voperands_t prev_vops)
{
  stmt_ann_t ann;
  size_t i;
  tree result, source;

#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort ();
#endif

  ann = stmt_ann (stmt);

  /* Don't allow duplicate entries.  */

  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i += 2)
    {
      tree result = VARRAY_TREE (build_v_may_defs, i);
      if (var == result
          || (TREE_CODE (result) == SSA_NAME
              && var == SSA_NAME_VAR (result)))
        return;
    }

  /* If the statement already had virtual definitions, see if any of the
     existing V_MAY_DEFs matches VAR.  If so, re-use it, otherwise add a new
     V_MAY_DEF for VAR.  */
  result = NULL_TREE;
  source = NULL_TREE;
  if (prev_vops)
    for (i = 0; i < NUM_V_MAY_DEFS (prev_vops->v_may_def_ops); i++)
      {
        result = V_MAY_DEF_RESULT (prev_vops->v_may_def_ops, i);
        if (result == var
            || (TREE_CODE (result) == SSA_NAME
                && SSA_NAME_VAR (result) == var))
          {
            source = V_MAY_DEF_OP (prev_vops->v_may_def_ops, i);
            break;
          }
      }

  /* If no previous V_MAY_DEF operand was found for VAR, create one now.  */
  if (source == NULL_TREE)
    {
      result = var;
      source = var;
    }

  VARRAY_PUSH_TREE (build_v_may_defs, result);
  VARRAY_PUSH_TREE (build_v_may_defs, source);
}

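/* For example, when rescanning "a = a + 1" for an aliased variable 'a',
   this pushes the pair { a, a } onto BUILD_V_MAY_DEFS, or re-uses the
   SSA names saved in PREV_VOPS (e.g. { a_3, a_2 }), which renders as
   "a_3 = V_MAY_DEF <a_2>" in dumps.  */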

/* Add VAR to the list of virtual uses for STMT.  If PREV_VOPS
   is not NULL, the existing entries are preserved and no new entries are
   added here.  This is done to preserve the SSA numbering of virtual
   operands.  */

static void
append_vuse (tree var, tree stmt, voperands_t prev_vops)
{
  stmt_ann_t ann;
  size_t i;
  bool found;
  tree vuse;

#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort ();
#endif

  ann = stmt_ann (stmt);

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    {
      tree vuse_var = VARRAY_TREE (build_vuses, i);
      if (var == vuse_var
          || (TREE_CODE (vuse_var) == SSA_NAME
              && var == SSA_NAME_VAR (vuse_var)))
        return;
    }

  /* If the statement already had virtual uses, see if any of the
     existing VUSEs matches VAR.  If so, re-use it, otherwise add a new
     VUSE for VAR.  */
  found = false;
  vuse = NULL_TREE;
  if (prev_vops)
    for (i = 0; i < NUM_VUSES (prev_vops->vuse_ops); i++)
      {
        vuse = VUSE_OP (prev_vops->vuse_ops, i);
        if (vuse == var
            || (TREE_CODE (vuse) == SSA_NAME
                && SSA_NAME_VAR (vuse) == var))
          {
            found = true;
            break;
          }
      }

  /* If VAR existed already in PREV_VOPS, re-use it.  */
  if (found)
    var = vuse;

  VARRAY_PUSH_TREE (build_vuses, var);
}

/* Add VAR to the list of virtual must definitions for STMT.  If PREV_VOPS
   is not NULL, the existing entries are preserved and no new entries are
   added here.  This is done to preserve the SSA numbering of virtual
   operands.  */

static void
append_v_must_def (tree var, tree stmt, voperands_t prev_vops)
{
  stmt_ann_t ann;
  size_t i;
  bool found;
  tree v_must_def;

#ifdef ENABLE_CHECKING
  if (check_build_stmt != stmt)
    abort ();
#endif

  ann = stmt_ann (stmt);

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
    {
      tree v_must_def_var = VARRAY_TREE (build_v_must_defs, i);
      if (var == v_must_def_var
          || (TREE_CODE (v_must_def_var) == SSA_NAME
              && var == SSA_NAME_VAR (v_must_def_var)))
        return;
    }

  /* If the statement already had virtual must defs, see if any of the
     existing V_MUST_DEFs matches VAR.  If so, re-use it, otherwise add a new
     V_MUST_DEF for VAR.  */
  found = false;
  v_must_def = NULL_TREE;
  if (prev_vops)
    for (i = 0; i < NUM_V_MUST_DEFS (prev_vops->v_must_def_ops); i++)
      {
        v_must_def = V_MUST_DEF_OP (prev_vops->v_must_def_ops, i);
        if (v_must_def == var
            || (TREE_CODE (v_must_def) == SSA_NAME
                && SSA_NAME_VAR (v_must_def) == var))
          {
            found = true;
            break;
          }
      }

  /* If VAR existed already in PREV_VOPS, re-use it.  */
  if (found)
    var = v_must_def;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}


/* External entry point which bypasses the previous vops mechanism.  */

void
add_vuse (tree var, tree stmt)
{
  append_vuse (var, stmt, NULL);
}

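/* Example (hypothetical caller): a pass that wants STMT to be treated
   as reading VAR can record the extra virtual use directly with

     add_vuse (var, stmt);

   Under ENABLE_CHECKING this must happen while STMT's operands are
   being built, since append_vuse verifies CHECK_BUILD_STMT.  */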

/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt ().  */

void
get_stmt_operands (tree stmt)
{
  enum tree_code code;
  stmt_ann_t ann;
  struct voperands_d prev_vops;

#if defined ENABLE_CHECKING
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  if (SSA_VAR_P (stmt))
    abort ();
#endif

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  ann->has_volatile_ops = false;
  ann->makes_aliased_stores = false;
  ann->makes_aliased_loads = false;

  /* Remove any existing operands as they will be scanned again.  */
  free_defs (&(ann->def_ops), true);
  free_uses (&(ann->use_ops), true);

  /* Before removing existing virtual operands, save them in PREV_VOPS so
     that we can re-use their SSA versions.  */
  prev_vops.v_may_def_ops = V_MAY_DEF_OPS (ann);
  prev_vops.vuse_ops = VUSE_OPS (ann);
  prev_vops.v_must_def_ops = V_MUST_DEF_OPS (ann);

  /* Don't release the previous virtual operands' memory yet, since
     we're still using them.  */
  free_v_may_defs (&(ann->v_may_def_ops), false);
  free_vuses (&(ann->vuse_ops), false);
  free_v_must_defs (&(ann->v_must_def_ops), false);

  start_ssa_stmt_operands (stmt);

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none, &prev_vops);
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
          || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
          /* Use a V_MAY_DEF if the RHS might throw, as the LHS will not
             be modified in that case.  FIXME we should represent somehow
             that it is killed on the fallthrough path.  */
          || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
        get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def,
                           &prev_vops);
      else
        get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
                           opf_is_def | opf_kill_def, &prev_vops);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none, &prev_vops);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none, &prev_vops);
      break;

    case ASM_EXPR:
      {
        int noutputs = list_length (ASM_OUTPUTS (stmt));
        const char **oconstraints
          = (const char **) alloca ((noutputs) * sizeof (const char *));
        int i;
        tree link;
        const char *constraint;
        bool allows_mem, allows_reg, is_inout;

        for (i = 0, link = ASM_OUTPUTS (stmt); link;
             ++i, link = TREE_CHAIN (link))
          {
            oconstraints[i] = constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_output_constraint (&constraint, i, 0, 0,
                                     &allows_mem, &allows_reg, &is_inout);
            if (allows_reg && is_inout)
              /* This should have been split in gimplify_asm_expr.  */
              abort ();

            if (!allows_reg && allows_mem)
              {
                tree t = get_base_address (TREE_VALUE (link));
                if (t && DECL_P (t))
                  mark_call_clobbered (t);
              }

            get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def,
                               &prev_vops);
          }

        for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
          {
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);

            if (!allows_reg && allows_mem)
              {
                tree t = get_base_address (TREE_VALUE (link));
                if (t && DECL_P (t))
                  mark_call_clobbered (t);
              }

            get_expr_operands (stmt, &TREE_VALUE (link), opf_none, &prev_vops);
          }

        /* Clobber memory for asm ("" : : : "memory");  */
        for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
          if (!strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory"))
            add_call_clobber_ops (stmt, &prev_vops);
      }
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none, &prev_vops);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none, &prev_vops);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none, &prev_vops);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
         pointer (which may only happen for USE operands), we will abort in
         append_use.  This default will handle statements like empty
         statements, CALL_EXPRs or VA_ARG_EXPRs that may appear on the RHS
         of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none, &prev_vops);
      break;
    }

  finalize_ssa_stmt_operands (stmt);

  /* Now release the previous virtual operands.  */
  free_v_may_defs (&(prev_vops.v_may_def_ops), true);
  free_vuses (&(prev_vops.vuse_ops), true);
  free_v_must_defs (&(prev_vops.v_must_def_ops), true);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt ().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}

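/* A typical update sequence in a client pass (sketch; modify_stmt is
   the standard way to mark the operand cache stale):

     TREE_OPERAND (stmt, 1) = new_rhs;   -- hypothetical rewrite
     modify_stmt (stmt);                 -- invalidate cached operands
     get_stmt_operands (stmt);           -- rescan and rebuild

   Without the modify_stmt call, the early return above would leave the
   old operands in place.  */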

/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  PREV_VOPS is as in append_v_may_def and append_vuse.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags, voperands_t prev_vops)
{
  enum tree_code code;
  char class;
  tree expr = *expr_p;

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  /* We could have the address of a component, array member, etc. which
     has interesting variable references.  */
  if (code == ADDR_EXPR)
    {
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that STMT takes its address will be
         of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, stmt, 0, NULL);

      /* If the address is constant (invariant is not sufficient), there will
         be no interesting variable references inside.  */
      if (TREE_CONSTANT (expr))
        return;

      /* There should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find here
         are ARRAY_REF indices which will always be real operands (GIMPLE
         does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      /* Avoid recursion.  */
      expr_p = &TREE_OPERAND (expr, 0);
      expr = *expr_p;
      code = TREE_CODE (expr);
      class = TREE_CODE_CLASS (code);
    }

  /* Expressions that make no memory references.  */
  if (class == 'c'
      || class == 't'
      || code == BLOCK
      || code == FUNCTION_DECL
      || code == EXC_PTR_EXPR
      || code == FILTER_EXPR
      || code == LABEL_DECL)
    return;

  /* If we found a variable, add it to DEFS or USES depending on the
     operand flags.  */
  if (SSA_VAR_P (expr))
    {
      add_stmt_operand (expr_p, stmt, flags, prev_vops);
      return;
    }

  /* Pointer dereferences always represent a use of the base pointer.  */
  if (code == INDIRECT_REF)
    {
      tree *pptr = &TREE_OPERAND (expr, 0);
      tree ptr = *pptr;

      if (SSA_VAR_P (ptr))
        {
          if (!aliases_computed_p)
            {
              /* If the pointer does not have a memory tag and aliases have
                 not been computed yet, mark the statement as having
                 volatile operands to prevent DOM from entering it in
                 equivalence tables and DCE from killing it.  */
              stmt_ann (stmt)->has_volatile_ops = true;
            }
          else
            {
              struct ptr_info_def *pi = NULL;

              /* If we have computed aliasing already, check if PTR has
                 flow-sensitive points-to information.  */
              if (TREE_CODE (ptr) == SSA_NAME
                  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
                  && pi->name_mem_tag)
                {
                  /* PTR has its own memory tag.  Use it.  */
                  add_stmt_operand (&pi->name_mem_tag, stmt, flags,
                                    prev_vops);
                }
              else
                {
                  /* If PTR is not an SSA_NAME or it doesn't have a name
                     tag, use its type memory tag.  */
                  var_ann_t ann;

                  /* If we are emitting debugging dumps, display a warning if
                     PTR is an SSA_NAME with no flow-sensitive alias
                     information.  That means that we may need to compute
                     aliasing again.  */
                  if (dump_file
                      && TREE_CODE (ptr) == SSA_NAME
                      && pi == NULL)
                    {
                      fprintf (dump_file,
                               "NOTE: no flow-sensitive alias info for ");
                      print_generic_expr (dump_file, ptr, dump_flags);
                      fprintf (dump_file, " in ");
                      print_generic_stmt (dump_file, stmt, dump_flags);
                    }

                  if (TREE_CODE (ptr) == SSA_NAME)
                    ptr = SSA_NAME_VAR (ptr);
                  ann = var_ann (ptr);
                  add_stmt_operand (&ann->type_mem_tag, stmt, flags,
                                    prev_vops);
                }
            }
        }

      /* If a constant is used as a pointer, we can't generate a real
         operand for it but we mark the statement volatile to prevent
         optimizations from messing things up.  */
      else if (TREE_CODE (ptr) == INTEGER_CST)
        {
          stmt_ann (stmt)->has_volatile_ops = true;
          return;
        }

      /* Everything else *should* have been folded elsewhere, but users
         are smarter than we are at finding ways to write invalid code.
         We cannot just abort here.  If we were absolutely certain that we
         do handle all valid cases, then we could just do nothing here.
         That seems optimistic, so attempt to do something logical...  */
      else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
               && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
        {
          /* Make sure we know the object is addressable.  */
          pptr = &TREE_OPERAND (ptr, 0);
          add_stmt_operand (pptr, stmt, 0, NULL);

          /* Mark the object itself with a VUSE.  */
          pptr = &TREE_OPERAND (*pptr, 0);
          get_expr_operands (stmt, pptr, flags, prev_vops);
          return;
        }

      /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
      else
        abort ();

      /* Add a USE operand for the base pointer.  */
      get_expr_operands (stmt, pptr, opf_none, prev_vops);
      return;
    }

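  /* For instance, given "*p_1 = x": if p_1 carries flow-sensitive
     points-to information, its name tag receives the virtual operand;
     otherwise the type tag of 'p' stands in for everything the pointer
     may point to.  */
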
  /* Treat array references as references to the virtual variable
     representing the array.  The virtual variable for an ARRAY_REF
     is the VAR_DECL for the array.  */
  if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
    {
      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
         according to FLAGS.  Recurse if the base of the ARRAY_REF node
         is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
        add_stmt_operand (expr_p, stmt, flags, prev_vops);
      else
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none, prev_vops);
      return;
    }

  /* Similarly to arrays, references to compound variables (complex types
     and structures/unions) are globbed.

     FIXME: This means that

        a.x = 6;
        a.y = 7;
        foo (a.x, a.y);

     will not be constant propagated because the two partial
     definitions to 'a' will kill each other.  Note that SRA may be
     able to fix this problem if 'a' can be scalarized.  */
  if (code == IMAGPART_EXPR || code == REALPART_EXPR || code == COMPONENT_REF)
    {
      /* If the base of the compound reference is not a regular variable,
         recurse to keep looking for more operands in the subexpression.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
        add_stmt_operand (expr_p, stmt, flags, prev_vops);
      else
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);

      if (code == COMPONENT_REF)
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);
      return;
    }

  /* Function calls.  Add every argument to USES.  If the callee is
     neither pure nor const, create a V_MAY_DEF reference for GLOBAL_VAR
     (See find_vars_r).  */
  if (code == CALL_EXPR)
    {
      tree op;
      int call_flags = call_expr_flags (expr);

      /* Find uses in the called function.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none, prev_vops);

      for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
        get_expr_operands (stmt, &TREE_VALUE (op), opf_none, prev_vops);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);

      if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
        {
          /* A 'pure' or a 'const' function never call-clobbers anything.
             A 'noreturn' function might, but since we don't return anyway
             there is no point in recording that.  */
          if (!(call_flags
                & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
            add_call_clobber_ops (stmt, prev_vops);
          else if (!(call_flags & (ECF_CONST | ECF_NORETURN)))
            add_call_read_ops (stmt, prev_vops);
        }
      else if (!aliases_computed_p)
        stmt_ann (stmt)->has_volatile_ops = true;

      return;
    }

  /* Lists.  */
  if (code == TREE_LIST)
    {
      tree op;

      for (op = expr; op; op = TREE_CHAIN (op))
        get_expr_operands (stmt, &TREE_VALUE (op), flags, prev_vops);

      return;
    }

  /* Assignments.  */
  if (code == MODIFY_EXPR)
    {
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none, prev_vops);
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (expr, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (expr, 0)) == REALPART_EXPR
          || TREE_CODE (TREE_OPERAND (expr, 0)) == IMAGPART_EXPR)
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def,
                           prev_vops);
      else
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                           opf_is_def | opf_kill_def, prev_vops);
      return;
    }


  /* Mark VA_ARG_EXPR nodes as making volatile references.  FIXME,
     this is needed because we currently do not gimplify VA_ARG_EXPR
     properly.  */
  if (code == VA_ARG_EXPR)
    {
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  /* Unary expressions.  */
  if (class == '1'
      || code == TRUTH_NOT_EXPR
      || code == BIT_FIELD_REF
      || code == CONSTRUCTOR)
    {
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
      return;
    }

  /* Binary expressions.  */
  if (class == '2'
      || class == '<'
      || code == TRUTH_AND_EXPR
      || code == TRUTH_OR_EXPR
      || code == TRUTH_XOR_EXPR
      || code == COMPOUND_EXPR
      || code == OBJ_TYPE_REF)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree op1 = TREE_OPERAND (expr, 1);

      /* If it would be profitable to swap the operands, then do so to
         canonicalize the statement, enabling better optimization.

         By placing canonicalization of such expressions here we
         transparently keep statements in canonical form, even
         when the statement is modified.  */
      if (tree_swap_operands_p (op0, op1, false))
        {
          /* For relationals we need to swap the operands and change
             the code.  */
          if (code == LT_EXPR
              || code == GT_EXPR
              || code == LE_EXPR
              || code == GE_EXPR)
            {
              TREE_SET_CODE (expr, swap_tree_comparison (code));
              TREE_OPERAND (expr, 0) = op1;
              TREE_OPERAND (expr, 1) = op0;
            }

          /* For a commutative operator we can just swap the operands.  */
          if (commutative_tree_code (code))
            {
              TREE_OPERAND (expr, 0) = op1;
              TREE_OPERAND (expr, 1) = op0;
            }
        }

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags, prev_vops);
      return;
    }

  /* If we get here, something has gone wrong.  */
  fprintf (stderr, "unhandled expression in get_expr_operands ():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  abort ();
}


/* Add *VAR_P to the appropriate operand array of STMT.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.

   PREV_VOPS is used when adding virtual operands to statements that
   already had them (See append_v_may_def and append_vuse).  */

static void
add_stmt_operand (tree *var_p, tree stmt, int flags, voperands_t prev_vops)
{
  bool is_real_op;
  tree var, sym;
  stmt_ann_t s_ann;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  s_ann = stmt_ann (stmt);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* FIXME: We currently refuse to optimize variables that have hidden uses
     (variables used in VLA declarations, MD builtin calls and variables
     from the parent function in nested functions).  This is because not
     all uses of these variables are exposed in the IL or the statements
     that reference them are not in GIMPLE form.  If that's the case, mark
     the statement as having volatile operands and return.  */
  if (v_ann->has_hidden_use)
    {
      s_ann->has_volatile_ops = true;
      return;
    }

  /* Don't expose volatile variables to the optimizers.  */
  if (TREE_THIS_VOLATILE (sym))
    {
      s_ann->has_volatile_ops = true;
      return;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
        append_def (var_p, stmt);
      else
        append_use (var_p, stmt);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
         virtual operands, unless the caller has specifically requested
         not to add virtual operands (used when adding operands inside an
         ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
        return;

      aliases = v_ann->may_aliases;

      /* If alias information hasn't been computed yet, then
         addressable variables will not be an alias tag nor will they
         have aliases.  In this case, mark the statement as having
         volatile operands.  */
      if (!aliases_computed_p && may_be_aliased (var))
        s_ann->has_volatile_ops = true;

      if (aliases == NULL)
        {
          /* The variable is not aliased or it is an alias tag.  */
          if (flags & opf_is_def)
            {
              if (v_ann->is_alias_tag)
                {
                  /* Alias-tagged vars get a regular V_MAY_DEF.  */
                  s_ann->makes_aliased_stores = 1;
                  append_v_may_def (var, stmt, prev_vops);
                }
              else if ((flags & opf_kill_def)
                       && v_ann->mem_tag_kind == NOT_A_TAG)
                /* V_MUST_DEF for non-aliased non-GIMPLE register
                   variable definitions.  Avoid memory tags.  */
                append_v_must_def (var, stmt, prev_vops);
              else
                /* Call-clobbered variables and memory tags get a
                   V_MAY_DEF.  */
                append_v_may_def (var, stmt, prev_vops);
            }
          else
            {
              append_vuse (var, stmt, prev_vops);
              if (v_ann->is_alias_tag)
                s_ann->makes_aliased_loads = 1;
            }
        }
      else
        {
          size_t i;

          /* The variable is aliased.  Add its aliases to the virtual
             operands.  */
          if (VARRAY_ACTIVE_SIZE (aliases) == 0)
            abort ();

          if (flags & opf_is_def)
            {
              /* If the variable is also an alias tag, add a virtual
                 operand for it, otherwise we will miss representing
                 references to the members of the variable's alias set.
                 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
              if (v_ann->is_alias_tag)
                append_v_may_def (var, stmt, prev_vops);

              for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
                append_v_may_def (VARRAY_TREE (aliases, i), stmt, prev_vops);

              s_ann->makes_aliased_stores = 1;
            }
          else
            {
              if (v_ann->is_alias_tag)
                append_vuse (var, stmt, prev_vops);

              for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
                append_vuse (VARRAY_TREE (aliases, i), stmt, prev_vops);

              s_ann->makes_aliased_loads = 1;
            }
        }
    }
}

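/* Example: if 'a' has may-aliases { b, c }, a store to 'a' is expanded
   into V_MAY_DEFs for 'b' and 'c' (plus one for 'a' itself when it is
   an alias tag), and MAKES_ALIASED_STORES is set on the statement; a
   load produces the corresponding VUSEs instead.  */
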
/* Record that VAR had its address taken in the statement with annotations
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
        s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
      bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}


/* Add clobbering definitions for .GLOBAL_VAR or for each of the
   call-clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, voperands_t prev_vops)
{
  /* Functions that are neither const, pure nor noreturn may clobber
     call-clobbered variables.  */
  stmt_ann (stmt)->makes_clobbering_call = true;

  /* If we had created .GLOBAL_VAR earlier, use it.  Otherwise, add
     a V_MAY_DEF operand for every call-clobbered variable.  See
     compute_may_aliases for the heuristic used to decide whether
     to create .GLOBAL_VAR or not.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_is_def, prev_vops);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
        {
          tree var = referenced_var (i);

          /* If VAR is read-only, don't add a V_MAY_DEF, just a
             VUSE operand.  */
          if (!TREE_READONLY (var))
            add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
          else
            add_stmt_operand (&var, stmt, opf_none, prev_vops);
        });
    }
}

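/* Example: for a call such as "foo ()" where foo is neither pure nor
   const, every variable in CALL_CLOBBERED_VARS receives a V_MAY_DEF
   (read-only ones only a VUSE); with aggregation enabled, the single
   .GLOBAL_VAR operand summarizes all of them.  */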

/* Add VUSE operands for .GLOBAL_VAR or all call-clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, voperands_t prev_vops)
{
  /* If the called function is not pure, it may still reference memory.
     Add a VUSE for .GLOBAL_VAR if it has been created.  Otherwise, add
     a VUSE for each call-clobbered variable.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    add_stmt_operand (&global_var, stmt, opf_none, prev_vops);
  else
    {
      size_t i;

      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
        {
          tree var = referenced_var (i);
          add_stmt_operand (&var, stmt, opf_none, prev_vops);
        });
    }
}

#include "gt-tree-ssa-operands.h"