]>
Commit | Line | Data |
---|---|---|
6de9cd9a | 1 | /* SSA operands management for trees. |
20f06221 | 2 | Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc. |
6de9cd9a DN |
3 | |
4 | This file is part of GCC. | |
5 | ||
6 | GCC is free software; you can redistribute it and/or modify | |
7 | it under the terms of the GNU General Public License as published by | |
8 | the Free Software Foundation; either version 2, or (at your option) | |
9 | any later version. | |
10 | ||
11 | GCC is distributed in the hope that it will be useful, | |
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | GNU General Public License for more details. | |
15 | ||
16 | You should have received a copy of the GNU General Public License | |
17 | along with GCC; see the file COPYING. If not, write to | |
366ccddb KC |
18 | the Free Software Foundation, 51 Franklin Street, Fifth Floor, |
19 | Boston, MA 02110-1301, USA. */ | |
6de9cd9a DN |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
26 | #include "flags.h" | |
27 | #include "function.h" | |
28 | #include "diagnostic.h" | |
29 | #include "tree-flow.h" | |
30 | #include "tree-inline.h" | |
31 | #include "tree-pass.h" | |
32 | #include "ggc.h" | |
33 | #include "timevar.h" | |
4c714dd4 | 34 | #include "toplev.h" |
6674a6ce | 35 | #include "langhooks.h" |
ea900239 | 36 | #include "ipa-reference.h" |
1a24f92f | 37 | |
6cb38cd4 | 38 | /* This file contains the code required to manage the operands cache of the |
1a24f92f | 39 | SSA optimizer. For every stmt, we maintain an operand cache in the stmt |
6cb38cd4 | 40 | annotation. This cache contains operands that will be of interest to |
1a24f92f AM |
41 | optimizers and other passes wishing to manipulate the IL. |
42 | ||
43 | The operand type are broken up into REAL and VIRTUAL operands. The real | |
44 | operands are represented as pointers into the stmt's operand tree. Thus | |
45 | any manipulation of the real operands will be reflected in the actual tree. | |
46 | Virtual operands are represented solely in the cache, although the base | |
47 | variable for the SSA_NAME may, or may not occur in the stmt's tree. | |
48 | Manipulation of the virtual operands will not be reflected in the stmt tree. | |
49 | ||
50 | The routines in this file are concerned with creating this operand cache | |
51 | from a stmt tree. | |
52 | ||
1a24f92f | 53 | The operand tree is the parsed by the various get_* routines which look |
2a7e31df | 54 | through the stmt tree for the occurrence of operands which may be of |
1a24f92f AM |
55 | interest, and calls are made to the append_* routines whenever one is |
56 | found. There are 5 of these routines, each representing one of the | |
57 | 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and | |
58 | Virtual Must Defs. | |
59 | ||
60 | The append_* routines check for duplication, and simply keep a list of | |
61 | unique objects for each operand type in the build_* extendable vectors. | |
62 | ||
63 | Once the stmt tree is completely parsed, the finalize_ssa_operands() | |
64 | routine is called, which proceeds to perform the finalization routine | |
65 | on each of the 5 operand vectors which have been built up. | |
66 | ||
67 | If the stmt had a previous operand cache, the finalization routines | |
f3b569ca | 68 | attempt to match up the new operands with the old ones. If it's a perfect |
1a24f92f AM |
69 | match, the old vector is simply reused. If it isn't a perfect match, then |
70 | a new vector is created and the new operands are placed there. For | |
71 | virtual operands, if the previous cache had SSA_NAME version of a | |
72 | variable, and that same variable occurs in the same operands cache, then | |
73 | the new cache vector will also get the same SSA_NAME. | |
74 | ||
454ff5cb | 75 | i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand |
1a24f92f AM |
76 | vector for VUSE, then the new vector will also be modified such that |
77 | it contains 'a_5' rather than 'a'. | |
78 | ||
79 | */ | |
80 | ||
81 | ||
1e6a5d3c | 82 | /* Flags to describe operand properties in helpers. */ |
6de9cd9a DN |
83 | |
84 | /* By default, operands are loaded. */ | |
85 | #define opf_none 0 | |
86 | ||
a32b97a2 BB |
87 | /* Operand is the target of an assignment expression or a |
88 | call-clobbered variable */ | |
6de9cd9a DN |
89 | #define opf_is_def (1 << 0) |
90 | ||
a32b97a2 | 91 | /* Operand is the target of an assignment expression. */ |
50dc9a88 | 92 | #define opf_kill_def (1 << 1) |
a32b97a2 | 93 | |
6de9cd9a DN |
94 | /* No virtual operands should be created in the expression. This is used |
95 | when traversing ADDR_EXPR nodes which have different semantics than | |
96 | other expressions. Inside an ADDR_EXPR node, the only operands that we | |
97 | need to consider are indices into arrays. For instance, &a.b[i] should | |
98 | generate a USE of 'i' but it should not generate a VUSE for 'a' nor a | |
99 | VUSE for 'b'. */ | |
50dc9a88 | 100 | #define opf_no_vops (1 << 2) |
6de9cd9a | 101 | |
0d2bf6f0 RH |
102 | /* Operand is a "non-specific" kill for call-clobbers and such. This is used |
103 | to distinguish "reset the world" events from explicit MODIFY_EXPRs. */ | |
104 | #define opf_non_specific (1 << 3) | |
105 | ||
f47c96aa | 106 | |
6de9cd9a | 107 | /* Array for building all the def operands. */ |
f3940b0e | 108 | static VEC(tree,heap) *build_defs; |
6de9cd9a DN |
109 | |
110 | /* Array for building all the use operands. */ | |
f3940b0e | 111 | static VEC(tree,heap) *build_uses; |
6de9cd9a | 112 | |
a32b97a2 | 113 | /* Array for building all the v_may_def operands. */ |
f3940b0e | 114 | static VEC(tree,heap) *build_v_may_defs; |
6de9cd9a DN |
115 | |
116 | /* Array for building all the vuse operands. */ | |
f3940b0e | 117 | static VEC(tree,heap) *build_vuses; |
6de9cd9a | 118 | |
a32b97a2 | 119 | /* Array for building all the v_must_def operands. */ |
f3940b0e | 120 | static VEC(tree,heap) *build_v_must_defs; |
a32b97a2 | 121 | |
e288e2f5 | 122 | |
6668f6a7 | 123 | /* These arrays are the cached operand vectors for call clobbered calls. */ |
f47c96aa | 124 | static bool ops_active = false; |
4c124b4c | 125 | |
f47c96aa AM |
126 | static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL; |
127 | static unsigned operand_memory_index; | |
4c124b4c | 128 | |
1a24f92f AM |
129 | static void get_expr_operands (tree, tree *, int); |
130 | static void get_asm_expr_operands (tree); | |
3c0b6c43 DB |
131 | static void get_indirect_ref_operands (tree, tree, int, tree, HOST_WIDE_INT, |
132 | HOST_WIDE_INT, bool); | |
ac182688 | 133 | static void get_tmr_operands (tree, tree, int); |
1a24f92f AM |
134 | static void get_call_expr_operands (tree, tree); |
135 | static inline void append_def (tree *); | |
136 | static inline void append_use (tree *); | |
137 | static void append_v_may_def (tree); | |
138 | static void append_v_must_def (tree); | |
ea900239 | 139 | static void add_call_clobber_ops (tree, tree); |
d16a5e36 | 140 | static void add_call_read_ops (tree, tree); |
e288e2f5 | 141 | static void add_stmt_operand (tree *, stmt_ann_t, int); |
3c0b6c43 DB |
142 | static void add_virtual_operand (tree, stmt_ann_t, int, tree, |
143 | HOST_WIDE_INT, HOST_WIDE_INT, | |
144 | bool); | |
f47c96aa AM |
145 | static void build_ssa_operands (tree stmt); |
146 | ||
147 | static def_optype_p free_defs = NULL; | |
148 | static use_optype_p free_uses = NULL; | |
149 | static vuse_optype_p free_vuses = NULL; | |
150 | static maydef_optype_p free_maydefs = NULL; | |
151 | static mustdef_optype_p free_mustdefs = NULL; | |
1a24f92f | 152 | |
1a24f92f | 153 | |
c83eecad | 154 | /* Return the DECL_UID of the base variable of T. */ |
1a24f92f | 155 | |
f47c96aa | 156 | static inline unsigned |
f3940b0e | 157 | get_name_decl (tree t) |
6de9cd9a | 158 | { |
f3940b0e AM |
159 | if (TREE_CODE (t) != SSA_NAME) |
160 | return DECL_UID (t); | |
161 | else | |
162 | return DECL_UID (SSA_NAME_VAR (t)); | |
6de9cd9a DN |
163 | } |
164 | ||
f3940b0e | 165 | /* Comparison function for qsort used in operand_build_sort_virtual. */ |
1a24f92f | 166 | |
f3940b0e AM |
167 | static int |
168 | operand_build_cmp (const void *p, const void *q) | |
a32b97a2 | 169 | { |
f3940b0e AM |
170 | tree e1 = *((const tree *)p); |
171 | tree e2 = *((const tree *)q); | |
172 | unsigned int u1,u2; | |
173 | ||
174 | u1 = get_name_decl (e1); | |
175 | u2 = get_name_decl (e2); | |
f47c96aa | 176 | |
f3940b0e | 177 | /* We want to sort in ascending order. They can never be equal. */ |
f47c96aa | 178 | #ifdef ENABLE_CHECKING |
f3940b0e | 179 | gcc_assert (u1 != u2); |
f47c96aa | 180 | #endif |
f3940b0e | 181 | return (u1 > u2 ? 1 : -1); |
a32b97a2 BB |
182 | } |
183 | ||
f3940b0e | 184 | /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */ |
1a24f92f | 185 | |
6de9cd9a | 186 | static inline void |
f3940b0e | 187 | operand_build_sort_virtual (VEC(tree,heap) *list) |
6de9cd9a | 188 | { |
f3940b0e AM |
189 | int num = VEC_length (tree, list); |
190 | if (num < 2) | |
191 | return; | |
192 | if (num == 2) | |
6de9cd9a | 193 | { |
f3940b0e AM |
194 | if (get_name_decl (VEC_index (tree, list, 0)) |
195 | > get_name_decl (VEC_index (tree, list, 1))) | |
196 | { | |
197 | /* Swap elements if in the wrong order. */ | |
198 | tree tmp = VEC_index (tree, list, 0); | |
199 | VEC_replace (tree, list, 0, VEC_index (tree, list, 1)); | |
200 | VEC_replace (tree, list, 1, tmp); | |
201 | } | |
f47c96aa | 202 | return; |
6de9cd9a | 203 | } |
f3940b0e AM |
204 | /* There are 3 or more elements, call qsort. */ |
205 | qsort (VEC_address (tree, list), | |
206 | VEC_length (tree, list), | |
207 | sizeof (tree), | |
208 | operand_build_cmp); | |
6de9cd9a DN |
209 | } |
210 | ||
f430bae8 | 211 | |
1a24f92f | 212 | |
f47c96aa | 213 | /* Return true if the ssa operands cache is active. */ |
1a24f92f | 214 | |
f47c96aa AM |
215 | bool |
216 | ssa_operands_active (void) | |
6de9cd9a | 217 | { |
f47c96aa AM |
218 | return ops_active; |
219 | } | |
6de9cd9a | 220 | |
d16a5e36 DB |
221 | /* Structure storing statistics on how many call clobbers we have, and |
222 | how many where avoided. */ | |
223 | static struct | |
224 | { | |
225 | /* Number of call-clobbered ops we attempt to add to calls in | |
226 | add_call_clobber_ops. */ | |
227 | unsigned int clobbered_vars; | |
228 | ||
229 | /* Number of write-clobbers (v_may_defs) avoided by using | |
230 | not_written information. */ | |
231 | unsigned int static_write_clobbers_avoided; | |
232 | ||
233 | /* Number of reads (vuses) avoided by using not_read | |
234 | information. */ | |
235 | unsigned int static_read_clobbers_avoided; | |
236 | ||
237 | /* Number of write-clobbers avoided because the variable can't escape to | |
238 | this call. */ | |
239 | unsigned int unescapable_clobbers_avoided; | |
6de9cd9a | 240 | |
d16a5e36 DB |
241 | /* Number of readonly uses we attempt to add to calls in |
242 | add_call_read_ops. */ | |
243 | unsigned int readonly_clobbers; | |
244 | ||
245 | /* Number of readonly uses we avoid using not_read information. */ | |
246 | unsigned int static_readonly_clobbers_avoided; | |
247 | } clobber_stats; | |
248 | ||
f47c96aa AM |
249 | /* Initialize the operand cache routines. */ |
250 | ||
251 | void | |
252 | init_ssa_operands (void) | |
253 | { | |
f3940b0e AM |
254 | build_defs = VEC_alloc (tree, heap, 5); |
255 | build_uses = VEC_alloc (tree, heap, 10); | |
256 | build_vuses = VEC_alloc (tree, heap, 25); | |
257 | build_v_may_defs = VEC_alloc (tree, heap, 25); | |
258 | build_v_must_defs = VEC_alloc (tree, heap, 25); | |
259 | ||
f47c96aa AM |
260 | gcc_assert (operand_memory == NULL); |
261 | operand_memory_index = SSA_OPERAND_MEMORY_SIZE; | |
262 | ops_active = true; | |
d16a5e36 DB |
263 | memset (&clobber_stats, 0, sizeof (clobber_stats)); |
264 | ||
f47c96aa | 265 | } |
6de9cd9a | 266 | |
1a24f92f | 267 | |
f47c96aa AM |
268 | /* Dispose of anything required by the operand routines. */ |
269 | ||
270 | void | |
271 | fini_ssa_operands (void) | |
272 | { | |
273 | struct ssa_operand_memory_d *ptr; | |
f3940b0e AM |
274 | VEC_free (tree, heap, build_defs); |
275 | VEC_free (tree, heap, build_uses); | |
276 | VEC_free (tree, heap, build_v_must_defs); | |
277 | VEC_free (tree, heap, build_v_may_defs); | |
278 | VEC_free (tree, heap, build_vuses); | |
f47c96aa AM |
279 | free_defs = NULL; |
280 | free_uses = NULL; | |
281 | free_vuses = NULL; | |
282 | free_maydefs = NULL; | |
283 | free_mustdefs = NULL; | |
284 | while ((ptr = operand_memory) != NULL) | |
285 | { | |
286 | operand_memory = operand_memory->next; | |
287 | ggc_free (ptr); | |
1a24f92f AM |
288 | } |
289 | ||
f47c96aa | 290 | ops_active = false; |
d16a5e36 DB |
291 | |
292 | if (dump_file && (dump_flags & TDF_STATS)) | |
293 | { | |
294 | fprintf (dump_file, "Original clobbered vars:%d\n", clobber_stats.clobbered_vars); | |
295 | fprintf (dump_file, "Static write clobbers avoided:%d\n", clobber_stats.static_write_clobbers_avoided); | |
296 | fprintf (dump_file, "Static read clobbers avoided:%d\n", clobber_stats.static_read_clobbers_avoided); | |
297 | fprintf (dump_file, "Unescapable clobbers avoided:%d\n", clobber_stats.unescapable_clobbers_avoided); | |
298 | fprintf (dump_file, "Original readonly clobbers:%d\n", clobber_stats.readonly_clobbers); | |
299 | fprintf (dump_file, "Static readonly clobbers avoided:%d\n", clobber_stats.static_readonly_clobbers_avoided); | |
300 | } | |
f47c96aa | 301 | } |
1a24f92f | 302 | |
6de9cd9a | 303 | |
f47c96aa AM |
304 | /* Return memory for operands of SIZE chunks. */ |
305 | ||
306 | static inline void * | |
307 | ssa_operand_alloc (unsigned size) | |
308 | { | |
309 | char *ptr; | |
310 | if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE) | |
311 | { | |
312 | struct ssa_operand_memory_d *ptr; | |
e1111e8e | 313 | ptr = GGC_NEW (struct ssa_operand_memory_d); |
f47c96aa AM |
314 | ptr->next = operand_memory; |
315 | operand_memory = ptr; | |
316 | operand_memory_index = 0; | |
317 | } | |
318 | ptr = &(operand_memory->mem[operand_memory_index]); | |
319 | operand_memory_index += size; | |
320 | return ptr; | |
6de9cd9a DN |
321 | } |
322 | ||
1a24f92f | 323 | |
2e48874f | 324 | /* Make sure PTR is in the correct immediate use list. Since uses are simply |
f430bae8 AM |
325 | pointers into the stmt TREE, there is no way of telling if anyone has |
326 | changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>. | |
2e48874f | 327 | The contents are different, but the pointer is still the same. This |
f430bae8 | 328 | routine will check to make sure PTR is in the correct list, and if it isn't |
3623aa70 AM |
329 | put it in the correct list. We cannot simply check the previous node |
330 | because all nodes in the same stmt might have be changed. */ | |
f430bae8 AM |
331 | |
332 | static inline void | |
f47c96aa | 333 | correct_use_link (use_operand_p ptr, tree stmt) |
f430bae8 | 334 | { |
f47c96aa | 335 | use_operand_p prev; |
f430bae8 AM |
336 | tree root; |
337 | ||
338 | /* Fold_stmt () may have changed the stmt pointers. */ | |
339 | if (ptr->stmt != stmt) | |
340 | ptr->stmt = stmt; | |
341 | ||
342 | prev = ptr->prev; | |
343 | if (prev) | |
344 | { | |
a56b5394 AM |
345 | /* Find the root element, making sure we skip any safe iterators. */ |
346 | while (prev->use != NULL || prev->stmt == NULL) | |
347 | prev = prev->prev; | |
3623aa70 AM |
348 | |
349 | /* Get the ssa_name of the list the node is in. */ | |
a56b5394 | 350 | root = prev->stmt; |
f3b569ca | 351 | /* If it's the right list, simply return. */ |
f430bae8 AM |
352 | if (root == *(ptr->use)) |
353 | return; | |
354 | } | |
355 | /* Its in the wrong list if we reach here. */ | |
356 | delink_imm_use (ptr); | |
357 | link_imm_use (ptr, *(ptr->use)); | |
358 | } | |
359 | ||
360 | ||
5dc2e333 AM |
361 | /* This routine makes sure that PTR is in an immediate use list, and makes |
362 | sure the stmt pointer is set to the current stmt. Virtual uses do not need | |
363 | the overhead of correct_use_link since they cannot be directly manipulated | |
364 | like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */ | |
365 | static inline void | |
366 | set_virtual_use_link (use_operand_p ptr, tree stmt) | |
367 | { | |
368 | /* Fold_stmt () may have changed the stmt pointers. */ | |
369 | if (ptr->stmt != stmt) | |
370 | ptr->stmt = stmt; | |
371 | ||
372 | /* If this use isn't in a list, add it to the correct list. */ | |
373 | if (!ptr->prev) | |
374 | link_imm_use (ptr, *(ptr->use)); | |
375 | } | |
376 | ||
377 | ||
378 | ||
f47c96aa | 379 | #define FINALIZE_OPBUILD build_defs |
f3940b0e AM |
380 | #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \ |
381 | build_defs, (I)) | |
382 | #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \ | |
383 | build_defs, (I)) | |
f47c96aa AM |
384 | #define FINALIZE_FUNC finalize_ssa_def_ops |
385 | #define FINALIZE_ALLOC alloc_def | |
386 | #define FINALIZE_FREE free_defs | |
387 | #define FINALIZE_TYPE struct def_optype_d | |
388 | #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr) | |
389 | #define FINALIZE_OPS DEF_OPS | |
390 | #define FINALIZE_BASE(VAR) VAR | |
391 | #define FINALIZE_BASE_TYPE tree * | |
392 | #define FINALIZE_BASE_ZERO NULL | |
393 | #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL) | |
394 | #include "tree-ssa-opfinalize.h" | |
1a24f92f | 395 | |
f47c96aa AM |
396 | |
397 | /* This routine will create stmt operands for STMT from the def build list. */ | |
398 | ||
399 | static void | |
400 | finalize_ssa_defs (tree stmt) | |
6de9cd9a | 401 | { |
f3940b0e | 402 | unsigned int num = VEC_length (tree, build_defs); |
f47c96aa AM |
403 | /* There should only be a single real definition per assignment. */ |
404 | gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1); | |
6de9cd9a | 405 | |
f47c96aa AM |
406 | /* If there is an old list, often the new list is identical, or close, so |
407 | find the elements at the beginning that are the same as the vector. */ | |
408 | ||
409 | finalize_ssa_def_ops (stmt); | |
f3940b0e | 410 | VEC_truncate (tree, build_defs, 0); |
f47c96aa | 411 | } |
6de9cd9a | 412 | |
f47c96aa | 413 | #define FINALIZE_OPBUILD build_uses |
f3940b0e AM |
414 | #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \ |
415 | build_uses, (I)) | |
416 | #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \ | |
417 | build_uses, (I)) | |
f47c96aa AM |
418 | #define FINALIZE_FUNC finalize_ssa_use_ops |
419 | #define FINALIZE_ALLOC alloc_use | |
420 | #define FINALIZE_FREE free_uses | |
421 | #define FINALIZE_TYPE struct use_optype_d | |
422 | #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use) | |
423 | #define FINALIZE_OPS USE_OPS | |
424 | #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR) | |
5dc2e333 | 425 | #define FINALIZE_CORRECT_USE correct_use_link |
f47c96aa AM |
426 | #define FINALIZE_BASE(VAR) VAR |
427 | #define FINALIZE_BASE_TYPE tree * | |
428 | #define FINALIZE_BASE_ZERO NULL | |
429 | #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \ | |
430 | (PTR)->use_ptr.use = (VAL); \ | |
431 | link_imm_use_stmt (&((PTR)->use_ptr), \ | |
432 | *(VAL), (STMT)) | |
433 | #include "tree-ssa-opfinalize.h" | |
434 | ||
435 | /* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */ | |
436 | ||
437 | static void | |
438 | finalize_ssa_uses (tree stmt) | |
439 | { | |
6de9cd9a DN |
440 | #ifdef ENABLE_CHECKING |
441 | { | |
442 | unsigned x; | |
f3940b0e | 443 | unsigned num = VEC_length (tree, build_uses); |
f47c96aa | 444 | |
6de9cd9a | 445 | /* If the pointer to the operand is the statement itself, something is |
f47c96aa AM |
446 | wrong. It means that we are pointing to a local variable (the |
447 | initial call to get_stmt_operands does not pass a pointer to a | |
448 | statement). */ | |
6de9cd9a | 449 | for (x = 0; x < num; x++) |
f3940b0e | 450 | gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt); |
6de9cd9a DN |
451 | } |
452 | #endif | |
f47c96aa | 453 | finalize_ssa_use_ops (stmt); |
f3940b0e | 454 | VEC_truncate (tree, build_uses, 0); |
6de9cd9a | 455 | } |
f47c96aa AM |
456 | |
457 | ||
458 | /* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */ | |
459 | #define FINALIZE_OPBUILD build_v_may_defs | |
f3940b0e AM |
460 | #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I)) |
461 | #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \ | |
462 | build_v_may_defs, (I))) | |
f47c96aa AM |
463 | #define FINALIZE_FUNC finalize_ssa_v_may_def_ops |
464 | #define FINALIZE_ALLOC alloc_maydef | |
465 | #define FINALIZE_FREE free_maydefs | |
466 | #define FINALIZE_TYPE struct maydef_optype_d | |
467 | #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR) | |
468 | #define FINALIZE_OPS MAYDEF_OPS | |
469 | #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR) | |
5dc2e333 | 470 | #define FINALIZE_CORRECT_USE set_virtual_use_link |
f47c96aa | 471 | #define FINALIZE_BASE_ZERO 0 |
f3940b0e | 472 | #define FINALIZE_BASE(VAR) get_name_decl (VAR) |
f47c96aa AM |
473 | #define FINALIZE_BASE_TYPE unsigned |
474 | #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \ | |
475 | (PTR)->def_var = (VAL); \ | |
476 | (PTR)->use_var = (VAL); \ | |
477 | (PTR)->use_ptr.use = &((PTR)->use_var); \ | |
478 | link_imm_use_stmt (&((PTR)->use_ptr), \ | |
479 | (VAL), (STMT)) | |
480 | #include "tree-ssa-opfinalize.h" | |
481 | ||
482 | ||
483 | static void | |
484 | finalize_ssa_v_may_defs (tree stmt) | |
6de9cd9a | 485 | { |
f47c96aa | 486 | finalize_ssa_v_may_def_ops (stmt); |
6de9cd9a | 487 | } |
f47c96aa | 488 | |
6de9cd9a | 489 | |
e288e2f5 AM |
490 | /* Clear the in_list bits and empty the build array for v_may_defs. */ |
491 | ||
492 | static inline void | |
493 | cleanup_v_may_defs (void) | |
494 | { | |
495 | unsigned x, num; | |
f3940b0e | 496 | num = VEC_length (tree, build_v_may_defs); |
e288e2f5 AM |
497 | |
498 | for (x = 0; x < num; x++) | |
499 | { | |
f3940b0e | 500 | tree t = VEC_index (tree, build_v_may_defs, x); |
f47c96aa AM |
501 | if (TREE_CODE (t) != SSA_NAME) |
502 | { | |
503 | var_ann_t ann = var_ann (t); | |
504 | ann->in_v_may_def_list = 0; | |
505 | } | |
e288e2f5 | 506 | } |
f3940b0e | 507 | VEC_truncate (tree, build_v_may_defs, 0); |
f47c96aa AM |
508 | } |
509 | ||
510 | ||
511 | #define FINALIZE_OPBUILD build_vuses | |
f3940b0e AM |
512 | #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I)) |
513 | #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \ | |
514 | build_vuses, (I))) | |
f47c96aa AM |
515 | #define FINALIZE_FUNC finalize_ssa_vuse_ops |
516 | #define FINALIZE_ALLOC alloc_vuse | |
517 | #define FINALIZE_FREE free_vuses | |
518 | #define FINALIZE_TYPE struct vuse_optype_d | |
519 | #define FINALIZE_ELEM(PTR) VUSE_OP (PTR) | |
520 | #define FINALIZE_OPS VUSE_OPS | |
521 | #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR) | |
5dc2e333 | 522 | #define FINALIZE_CORRECT_USE set_virtual_use_link |
f47c96aa | 523 | #define FINALIZE_BASE_ZERO 0 |
f3940b0e | 524 | #define FINALIZE_BASE(VAR) get_name_decl (VAR) |
f47c96aa AM |
525 | #define FINALIZE_BASE_TYPE unsigned |
526 | #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \ | |
527 | (PTR)->use_var = (VAL); \ | |
528 | (PTR)->use_ptr.use = &((PTR)->use_var); \ | |
529 | link_imm_use_stmt (&((PTR)->use_ptr), \ | |
530 | (VAL), (STMT)) | |
531 | #include "tree-ssa-opfinalize.h" | |
e288e2f5 | 532 | |
1a24f92f | 533 | |
f47c96aa AM |
534 | /* Return a new vuse operand vector, comparing to OLD_OPS_P. */ |
535 | ||
536 | static void | |
537 | finalize_ssa_vuses (tree stmt) | |
1a24f92f | 538 | { |
f47c96aa | 539 | unsigned num, num_v_may_defs; |
f3940b0e | 540 | unsigned vuse_index; |
6de9cd9a DN |
541 | |
542 | /* Remove superfluous VUSE operands. If the statement already has a | |
a32b97a2 BB |
543 | V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not |
544 | needed because V_MAY_DEFs imply a VUSE of the variable. For instance, | |
6de9cd9a DN |
545 | suppose that variable 'a' is aliased: |
546 | ||
547 | # VUSE <a_2> | |
a32b97a2 | 548 | # a_3 = V_MAY_DEF <a_2> |
6de9cd9a DN |
549 | a = a + 1; |
550 | ||
a32b97a2 | 551 | The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF |
6de9cd9a DN |
552 | operation. */ |
553 | ||
f3940b0e AM |
554 | num = VEC_length (tree, build_vuses); |
555 | num_v_may_defs = VEC_length (tree, build_v_may_defs); | |
1a24f92f | 556 | |
f47c96aa | 557 | if (num > 0 && num_v_may_defs > 0) |
6de9cd9a | 558 | { |
f3940b0e | 559 | for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); ) |
f47c96aa AM |
560 | { |
561 | tree vuse; | |
f3940b0e | 562 | vuse = VEC_index (tree, build_vuses, vuse_index); |
e288e2f5 | 563 | if (TREE_CODE (vuse) != SSA_NAME) |
6de9cd9a | 564 | { |
e288e2f5 AM |
565 | var_ann_t ann = var_ann (vuse); |
566 | ann->in_vuse_list = 0; | |
567 | if (ann->in_v_may_def_list) | |
568 | { | |
f3940b0e | 569 | VEC_ordered_remove (tree, build_vuses, vuse_index); |
f47c96aa | 570 | continue; |
6de9cd9a | 571 | } |
6de9cd9a | 572 | } |
f3940b0e | 573 | vuse_index++; |
6de9cd9a DN |
574 | } |
575 | } | |
e288e2f5 AM |
576 | else |
577 | /* Clear out the in_list bits. */ | |
f3940b0e AM |
578 | for (vuse_index = 0; |
579 | vuse_index < VEC_length (tree, build_vuses); | |
580 | vuse_index++) | |
e288e2f5 | 581 | { |
f3940b0e | 582 | tree t = VEC_index (tree, build_vuses, vuse_index); |
e288e2f5 AM |
583 | if (TREE_CODE (t) != SSA_NAME) |
584 | { | |
585 | var_ann_t ann = var_ann (t); | |
586 | ann->in_vuse_list = 0; | |
587 | } | |
588 | } | |
589 | ||
f47c96aa AM |
590 | finalize_ssa_vuse_ops (stmt); |
591 | /* The v_may_def build vector wasn't cleaned up because we needed it. */ | |
e288e2f5 | 592 | cleanup_v_may_defs (); |
f47c96aa AM |
593 | |
594 | /* Free the vuses build vector. */ | |
f3940b0e | 595 | VEC_truncate (tree, build_vuses, 0); |
1a24f92f | 596 | |
6de9cd9a | 597 | } |
f47c96aa | 598 | |
1a24f92f | 599 | /* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */ |
f47c96aa AM |
600 | |
601 | #define FINALIZE_OPBUILD build_v_must_defs | |
f3940b0e AM |
602 | #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I)) |
603 | #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \ | |
604 | build_v_must_defs, (I))) | |
f47c96aa AM |
605 | #define FINALIZE_FUNC finalize_ssa_v_must_def_ops |
606 | #define FINALIZE_ALLOC alloc_mustdef | |
607 | #define FINALIZE_FREE free_mustdefs | |
608 | #define FINALIZE_TYPE struct mustdef_optype_d | |
609 | #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR) | |
610 | #define FINALIZE_OPS MUSTDEF_OPS | |
611 | #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR) | |
5dc2e333 | 612 | #define FINALIZE_CORRECT_USE set_virtual_use_link |
f47c96aa | 613 | #define FINALIZE_BASE_ZERO 0 |
f3940b0e | 614 | #define FINALIZE_BASE(VAR) get_name_decl (VAR) |
f47c96aa AM |
615 | #define FINALIZE_BASE_TYPE unsigned |
616 | #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \ | |
617 | (PTR)->def_var = (VAL); \ | |
618 | (PTR)->kill_var = (VAL); \ | |
619 | (PTR)->use_ptr.use = &((PTR)->kill_var);\ | |
620 | link_imm_use_stmt (&((PTR)->use_ptr), \ | |
621 | (VAL), (STMT)) | |
622 | #include "tree-ssa-opfinalize.h" | |
1a24f92f | 623 | |
a32b97a2 | 624 | |
f47c96aa AM |
625 | static void |
626 | finalize_ssa_v_must_defs (tree stmt) | |
627 | { | |
c75ab022 DB |
628 | /* In the presence of subvars, there may be more than one V_MUST_DEF per |
629 | statement (one for each subvar). It is a bit expensive to verify that | |
630 | all must-defs in a statement belong to subvars if there is more than one | |
631 | MUST-def, so we don't do it. Suffice to say, if you reach here without | |
632 | having subvars, and have num >1, you have hit a bug. */ | |
a32b97a2 | 633 | |
f47c96aa | 634 | finalize_ssa_v_must_def_ops (stmt); |
f3940b0e | 635 | VEC_truncate (tree, build_v_must_defs, 0); |
a32b97a2 BB |
636 | } |
637 | ||
6de9cd9a | 638 | |
1a24f92f | 639 | /* Finalize all the build vectors, fill the new ones into INFO. */ |
f47c96aa | 640 | |
1a24f92f | 641 | static inline void |
f47c96aa | 642 | finalize_ssa_stmt_operands (tree stmt) |
1a24f92f | 643 | { |
f47c96aa AM |
644 | finalize_ssa_defs (stmt); |
645 | finalize_ssa_uses (stmt); | |
646 | finalize_ssa_v_must_defs (stmt); | |
647 | finalize_ssa_v_may_defs (stmt); | |
648 | finalize_ssa_vuses (stmt); | |
6de9cd9a DN |
649 | } |
650 | ||
651 | ||
1a24f92f AM |
652 | /* Start the process of building up operands vectors in INFO. */ |
653 | ||
654 | static inline void | |
655 | start_ssa_stmt_operands (void) | |
6de9cd9a | 656 | { |
f3940b0e AM |
657 | gcc_assert (VEC_length (tree, build_defs) == 0); |
658 | gcc_assert (VEC_length (tree, build_uses) == 0); | |
659 | gcc_assert (VEC_length (tree, build_vuses) == 0); | |
660 | gcc_assert (VEC_length (tree, build_v_may_defs) == 0); | |
661 | gcc_assert (VEC_length (tree, build_v_must_defs) == 0); | |
6de9cd9a DN |
662 | } |
663 | ||
664 | ||
1a24f92f | 665 | /* Add DEF_P to the list of pointers to operands. */ |
6de9cd9a DN |
666 | |
667 | static inline void | |
1a24f92f | 668 | append_def (tree *def_p) |
6de9cd9a | 669 | { |
f3940b0e | 670 | VEC_safe_push (tree, heap, build_defs, (tree)def_p); |
6de9cd9a DN |
671 | } |
672 | ||
673 | ||
1a24f92f | 674 | /* Add USE_P to the list of pointers to operands. */ |
6de9cd9a DN |
675 | |
676 | static inline void | |
1a24f92f | 677 | append_use (tree *use_p) |
6de9cd9a | 678 | { |
f3940b0e | 679 | VEC_safe_push (tree, heap, build_uses, (tree)use_p); |
6de9cd9a DN |
680 | } |
681 | ||
682 | ||
1a24f92f | 683 | /* Add a new virtual may def for variable VAR to the build array. */ |
6de9cd9a | 684 | |
1a24f92f AM |
685 | static inline void |
686 | append_v_may_def (tree var) | |
6de9cd9a | 687 | { |
f47c96aa AM |
688 | if (TREE_CODE (var) != SSA_NAME) |
689 | { | |
690 | var_ann_t ann = get_var_ann (var); | |
6de9cd9a | 691 | |
f47c96aa AM |
692 | /* Don't allow duplicate entries. */ |
693 | if (ann->in_v_may_def_list) | |
694 | return; | |
695 | ann->in_v_may_def_list = 1; | |
696 | } | |
6de9cd9a | 697 | |
f3940b0e | 698 | VEC_safe_push (tree, heap, build_v_may_defs, (tree)var); |
6de9cd9a DN |
699 | } |
700 | ||
701 | ||
1a24f92f | 702 | /* Add VAR to the list of virtual uses. */ |
6de9cd9a | 703 | |
1a24f92f AM |
704 | static inline void |
705 | append_vuse (tree var) | |
6de9cd9a | 706 | { |
6de9cd9a DN |
707 | |
708 | /* Don't allow duplicate entries. */ | |
e288e2f5 AM |
709 | if (TREE_CODE (var) != SSA_NAME) |
710 | { | |
711 | var_ann_t ann = get_var_ann (var); | |
712 | ||
713 | if (ann->in_vuse_list || ann->in_v_may_def_list) | |
714 | return; | |
715 | ann->in_vuse_list = 1; | |
716 | } | |
6de9cd9a | 717 | |
f3940b0e | 718 | VEC_safe_push (tree, heap, build_vuses, (tree)var); |
6de9cd9a DN |
719 | } |
720 | ||
a32b97a2 | 721 | |
1a24f92f | 722 | /* Add VAR to the list of virtual must definitions for INFO. */ |
a32b97a2 | 723 | |
1a24f92f AM |
724 | static inline void |
725 | append_v_must_def (tree var) | |
726 | { | |
727 | unsigned i; | |
a32b97a2 BB |
728 | |
729 | /* Don't allow duplicate entries. */ | |
f3940b0e AM |
730 | for (i = 0; i < VEC_length (tree, build_v_must_defs); i++) |
731 | if (var == VEC_index (tree, build_v_must_defs, i)) | |
1a24f92f | 732 | return; |
a32b97a2 | 733 | |
f3940b0e | 734 | VEC_safe_push (tree, heap, build_v_must_defs, (tree)var); |
a32b97a2 BB |
735 | } |
736 | ||
6de9cd9a | 737 | |
/* Parse STMT looking for operands.  OLD_OPS is the original stmt operand
   cache for STMT, if it existed before.  When finished, the various build_*
   operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  /* Dispatch on the statement code; each case hands the interesting
     sub-trees to get_expr_operands with the appropriate OPF_* flags.  */
  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
	 either only part of LHS is modified or if the RHS might throw,
	 otherwise, use V_MUST_DEF.

	 ??? If it might throw, we should represent somehow that it is killed
	 on the fallthrough path.  */
      {
	tree lhs = TREE_OPERAND (stmt, 0);
	int lhs_flags = opf_is_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

	/* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
	   or not the entire LHS is modified; that depends on what's
	   inside the VIEW_CONVERT_EXPR.  */
	if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
	  lhs = TREE_OPERAND (lhs, 0);

	/* Partial stores (ARRAY_RANGE_REF, BIT_FIELD_REF) must not be
	   treated as killing definitions.  */
	if (TREE_CODE (lhs) != ARRAY_RANGE_REF
	    && TREE_CODE (lhs) != BIT_FIELD_REF)
	  lhs_flags |= opf_kill_def;

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will fail in
	 append_use.  This default will handle statements like empty
	 statements, or CALL_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}
821 | ||
d14c5160 | 822 | /* Create an operands cache for STMT. */ |
f430bae8 AM |
823 | |
824 | static void | |
f47c96aa | 825 | build_ssa_operands (tree stmt) |
f430bae8 | 826 | { |
f47c96aa | 827 | stmt_ann_t ann = get_stmt_ann (stmt); |
f430bae8 | 828 | |
ff88c5aa | 829 | /* Initially assume that the statement has no volatile operands. */ |
f430bae8 | 830 | if (ann) |
ff88c5aa | 831 | ann->has_volatile_ops = false; |
f430bae8 AM |
832 | |
833 | start_ssa_stmt_operands (); | |
834 | ||
835 | parse_ssa_operands (stmt); | |
f3940b0e AM |
836 | operand_build_sort_virtual (build_vuses); |
837 | operand_build_sort_virtual (build_v_may_defs); | |
838 | operand_build_sort_virtual (build_v_must_defs); | |
f430bae8 | 839 | |
f47c96aa | 840 | finalize_ssa_stmt_operands (stmt); |
1a24f92f AM |
841 | } |
842 | ||
843 | ||
844 | /* Free any operands vectors in OPS. */ | |
6844185d | 845 | void |
1a24f92f AM |
846 | free_ssa_operands (stmt_operands_p ops) |
847 | { | |
f47c96aa AM |
848 | ops->def_ops = NULL; |
849 | ops->use_ops = NULL; | |
850 | ops->maydef_ops = NULL; | |
851 | ops->mustdef_ops = NULL; | |
852 | ops->vuse_ops = NULL; | |
f47c96aa | 853 | } |
f47c96aa AM |
854 | |
855 | ||
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to mark_stmt_modified().  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);
  /* If get_stmt_operands is called before SSA is initialized, don't
     do anything.  */
  if (!ssa_operands_active ())
    return;
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  /* Callers must only reach here when the statement was explicitly
     marked modified; a rebuild of an unmodified stmt is a caller bug.  */
  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to mark_stmt_modified().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
885 | ||
886 | ||
/* Copies virtual operands from SRC to DST.  DST is rebuilt from
   scratch, given SRC's virtual operands, and then each resulting
   operand slot is overwritten with the exact SSA values found on SRC
   so that both statements reference the same versions.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  /* Rebuild DEST's operand cache; this clears the build vectors so the
     appends below start from an empty state.  */
  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  /* Nothing to commit if SRC had no virtual operands at all.  */
  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  The
     finalized operands on DEST are base variables; walk SRC and DEST
     in lockstep and copy SRC's exact SSA uses/defs into DEST's slots.  */

  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  /* Both iterators must exhaust together, or the copy is inconsistent.  */
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

}
950 | ||
951 | ||
f47c96aa AM |
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store, this can
   be used to eliminate redundant loads.  OLD_OPS are the operands from the
   store stmt, and NEW_STMT is the new load which represents a load of the
   values stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  /* Ensure NEW_STMT has an annotation; get_stmt_ann creates it on
     demand.  The value itself is not used below.  */
  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  /* Clear the duplicate-tracking bits set by append_vuse for any
     non-SSA_NAME variables, since we are about to discard the virtual
     operand vectors built by the parse.  */
  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  /* Likewise for the bits set by append_v_may_def.  */
  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
f430bae8 | 1011 | |
/* Swap the operands pointed to by EXP0 and EXP1 in statement STMT,
   keeping the operand cache's immediate-use records consistent with
   the new positions.  */
void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative positions
     of these two operands in their respective immediate use lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;
      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }
      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  /* Swap the USE pointers so each cache entry keeps tracking
	     the same tree value at its new location.  */
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
1052 | ||
206048bd VR |
1053 | /* Recursively scan the expression pointed to by EXPR_P in statement referred |
1054 | to by INFO. FLAGS is one of the OPF_* constants modifying how to interpret | |
1055 | the operands found. */ | |
6de9cd9a DN |
1056 | |
1057 | static void | |
1a24f92f | 1058 | get_expr_operands (tree stmt, tree *expr_p, int flags) |
6de9cd9a DN |
1059 | { |
1060 | enum tree_code code; | |
6615c446 | 1061 | enum tree_code_class class; |
6de9cd9a | 1062 | tree expr = *expr_p; |
e288e2f5 | 1063 | stmt_ann_t s_ann = stmt_ann (stmt); |
6de9cd9a | 1064 | |
7d3bf067 | 1065 | if (expr == NULL) |
6de9cd9a DN |
1066 | return; |
1067 | ||
1068 | code = TREE_CODE (expr); | |
1069 | class = TREE_CODE_CLASS (code); | |
1070 | ||
310de761 | 1071 | switch (code) |
6de9cd9a | 1072 | { |
310de761 | 1073 | case ADDR_EXPR: |
6de9cd9a | 1074 | /* Taking the address of a variable does not represent a |
1a24f92f | 1075 | reference to it, but the fact that the stmt takes its address will be |
6de9cd9a | 1076 | of interest to some passes (e.g. alias resolution). */ |
243cdfa8 ZD |
1077 | add_to_addressable_set (TREE_OPERAND (expr, 0), |
1078 | &s_ann->addresses_taken); | |
6de9cd9a | 1079 | |
d397dbcd DN |
1080 | /* If the address is invariant, there may be no interesting variable |
1081 | references inside. */ | |
1082 | if (is_gimple_min_invariant (expr)) | |
6de9cd9a DN |
1083 | return; |
1084 | ||
1085 | /* There should be no VUSEs created, since the referenced objects are | |
1086 | not really accessed. The only operands that we should find here | |
1087 | are ARRAY_REF indices which will always be real operands (GIMPLE | |
1088 | does not allow non-registers as array indices). */ | |
1089 | flags |= opf_no_vops; | |
1090 | ||
1a24f92f | 1091 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
310de761 | 1092 | return; |
44de5aeb | 1093 | |
310de761 | 1094 | case SSA_NAME: |
326eda4b DB |
1095 | case STRUCT_FIELD_TAG: |
1096 | case TYPE_MEMORY_TAG: | |
1097 | case NAME_MEMORY_TAG: | |
8ae5e6f2 AP |
1098 | |
1099 | add_stmt_operand (expr_p, s_ann, flags); | |
1100 | return; | |
1101 | ||
310de761 RH |
1102 | case VAR_DECL: |
1103 | case PARM_DECL: | |
1104 | case RESULT_DECL: | |
c75ab022 DB |
1105 | { |
1106 | subvar_t svars; | |
1107 | ||
1108 | /* Add the subvars for a variable if it has subvars, to DEFS or USES. | |
1109 | Otherwise, add the variable itself. | |
1110 | Whether it goes to USES or DEFS depends on the operand flags. */ | |
1111 | if (var_can_have_subvars (expr) | |
1112 | && (svars = get_subvars_for_var (expr))) | |
1113 | { | |
1114 | subvar_t sv; | |
1115 | for (sv = svars; sv; sv = sv->next) | |
1116 | add_stmt_operand (&sv->var, s_ann, flags); | |
1117 | } | |
1118 | else | |
1119 | { | |
1120 | add_stmt_operand (expr_p, s_ann, flags); | |
1121 | } | |
1122 | return; | |
1123 | } | |
7ccf35ed DN |
1124 | case MISALIGNED_INDIRECT_REF: |
1125 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
1126 | /* fall through */ | |
1127 | ||
1128 | case ALIGN_INDIRECT_REF: | |
310de761 | 1129 | case INDIRECT_REF: |
3c0b6c43 DB |
1130 | get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, |
1131 | 0, -1, true); | |
6de9cd9a | 1132 | return; |
6de9cd9a | 1133 | |
ac182688 ZD |
1134 | case TARGET_MEM_REF: |
1135 | get_tmr_operands (stmt, expr, flags); | |
1136 | return; | |
1137 | ||
310de761 RH |
1138 | case ARRAY_RANGE_REF: |
1139 | /* Treat array references as references to the virtual variable | |
1140 | representing the array. The virtual variable for an ARRAY_REF | |
1141 | is the VAR_DECL for the array. */ | |
1142 | ||
6de9cd9a | 1143 | /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES |
243cdfa8 ZD |
1144 | according to the value of IS_DEF. */ |
1145 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
1a24f92f AM |
1146 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); |
1147 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); | |
1148 | get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none); | |
6de9cd9a | 1149 | return; |
6de9cd9a | 1150 | |
a916f21d | 1151 | case ARRAY_REF: |
310de761 RH |
1152 | case COMPONENT_REF: |
1153 | case REALPART_EXPR: | |
1154 | case IMAGPART_EXPR: | |
c75ab022 DB |
1155 | { |
1156 | tree ref; | |
6bec9271 | 1157 | HOST_WIDE_INT offset, size, maxsize; |
758cf3f2 | 1158 | bool none = true; |
c75ab022 DB |
1159 | /* This component ref becomes an access to all of the subvariables |
1160 | it can touch, if we can determine that, but *NOT* the real one. | |
1161 | If we can't determine which fields we could touch, the recursion | |
1162 | will eventually get to a variable and add *all* of its subvars, or | |
1163 | whatever is the minimum correct subset. */ | |
1164 | ||
6bec9271 RG |
1165 | ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize); |
1166 | if (SSA_VAR_P (ref) && get_subvars_for_var (ref)) | |
c75ab022 DB |
1167 | { |
1168 | subvar_t svars = get_subvars_for_var (ref); | |
1169 | subvar_t sv; | |
1170 | for (sv = svars; sv; sv = sv->next) | |
1171 | { | |
1172 | bool exact; | |
3c0b6c43 | 1173 | if (overlap_subvar (offset, maxsize, sv->var, &exact)) |
c75ab022 | 1174 | { |
98b6d477 | 1175 | int subvar_flags = flags; |
758cf3f2 | 1176 | none = false; |
6bec9271 RG |
1177 | if (!exact |
1178 | || size != maxsize) | |
7fac66d4 JH |
1179 | subvar_flags &= ~opf_kill_def; |
1180 | add_stmt_operand (&sv->var, s_ann, subvar_flags); | |
c75ab022 DB |
1181 | } |
1182 | } | |
758cf3f2 RG |
1183 | if (!none) |
1184 | flags |= opf_no_vops; | |
c75ab022 | 1185 | } |
3c0b6c43 DB |
1186 | else if (TREE_CODE (ref) == INDIRECT_REF) |
1187 | { | |
1188 | get_indirect_ref_operands (stmt, ref, flags, expr, | |
1189 | offset, maxsize, false); | |
1190 | flags |= opf_no_vops; | |
1191 | } | |
758cf3f2 RG |
1192 | |
1193 | /* Even if we found subvars above we need to ensure to see | |
1194 | immediate uses for d in s.a[d]. In case of s.a having | |
1195 | a subvar we'd miss it otherwise. */ | |
1196 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), | |
1197 | flags & ~opf_kill_def); | |
c75ab022 DB |
1198 | |
1199 | if (code == COMPONENT_REF) | |
305a1321 | 1200 | { |
707db096 | 1201 | if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1))) |
305a1321 MM |
1202 | s_ann->has_volatile_ops = true; |
1203 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); | |
1204 | } | |
a916f21d RG |
1205 | else if (code == ARRAY_REF) |
1206 | { | |
1207 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); | |
1208 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); | |
1209 | get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none); | |
1210 | } | |
c75ab022 DB |
1211 | return; |
1212 | } | |
d25cee4d | 1213 | case WITH_SIZE_EXPR: |
0e28378a | 1214 | /* WITH_SIZE_EXPR is a pass-through reference to its first argument, |
d25cee4d | 1215 | and an rvalue reference to its second argument. */ |
1a24f92f AM |
1216 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); |
1217 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
d25cee4d RH |
1218 | return; |
1219 | ||
310de761 | 1220 | case CALL_EXPR: |
1a24f92f | 1221 | get_call_expr_operands (stmt, expr); |
6de9cd9a | 1222 | return; |
6de9cd9a | 1223 | |
40923b20 | 1224 | case COND_EXPR: |
ad9f20cb DP |
1225 | case VEC_COND_EXPR: |
1226 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none); | |
40923b20 DP |
1227 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); |
1228 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none); | |
1229 | return; | |
1230 | ||
310de761 | 1231 | case MODIFY_EXPR: |
d25cee4d RH |
1232 | { |
1233 | int subflags; | |
1234 | tree op; | |
1235 | ||
1a24f92f | 1236 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none); |
d25cee4d RH |
1237 | |
1238 | op = TREE_OPERAND (expr, 0); | |
1239 | if (TREE_CODE (op) == WITH_SIZE_EXPR) | |
1240 | op = TREE_OPERAND (expr, 0); | |
a916f21d | 1241 | if (TREE_CODE (op) == ARRAY_RANGE_REF |
d25cee4d RH |
1242 | || TREE_CODE (op) == REALPART_EXPR |
1243 | || TREE_CODE (op) == IMAGPART_EXPR) | |
1244 | subflags = opf_is_def; | |
1245 | else | |
1246 | subflags = opf_is_def | opf_kill_def; | |
1247 | ||
1a24f92f | 1248 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags); |
d25cee4d RH |
1249 | return; |
1250 | } | |
6de9cd9a | 1251 | |
7b48e1e0 RH |
1252 | case CONSTRUCTOR: |
1253 | { | |
1254 | /* General aggregate CONSTRUCTORs have been decomposed, but they | |
1255 | are still in use as the COMPLEX_EXPR equivalent for vectors. */ | |
4038c495 GB |
1256 | constructor_elt *ce; |
1257 | unsigned HOST_WIDE_INT idx; | |
7b48e1e0 | 1258 | |
4038c495 GB |
1259 | for (idx = 0; |
1260 | VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce); | |
1261 | idx++) | |
1262 | get_expr_operands (stmt, &ce->value, opf_none); | |
7b48e1e0 RH |
1263 | |
1264 | return; | |
1265 | } | |
1266 | ||
310de761 RH |
1267 | case TRUTH_NOT_EXPR: |
1268 | case BIT_FIELD_REF: | |
4626c433 | 1269 | case VIEW_CONVERT_EXPR: |
310de761 | 1270 | do_unary: |
1a24f92f | 1271 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
6de9cd9a | 1272 | return; |
6de9cd9a | 1273 | |
310de761 RH |
1274 | case TRUTH_AND_EXPR: |
1275 | case TRUTH_OR_EXPR: | |
1276 | case TRUTH_XOR_EXPR: | |
1277 | case COMPOUND_EXPR: | |
1278 | case OBJ_TYPE_REF: | |
0bca51f0 | 1279 | case ASSERT_EXPR: |
310de761 RH |
1280 | do_binary: |
1281 | { | |
1a24f92f AM |
1282 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); |
1283 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
310de761 RH |
1284 | return; |
1285 | } | |
1286 | ||
20f06221 | 1287 | case DOT_PROD_EXPR: |
7ccf35ed DN |
1288 | case REALIGN_LOAD_EXPR: |
1289 | { | |
1290 | get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags); | |
1291 | get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags); | |
1292 | get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags); | |
1293 | return; | |
1294 | } | |
1295 | ||
310de761 RH |
1296 | case BLOCK: |
1297 | case FUNCTION_DECL: | |
1298 | case EXC_PTR_EXPR: | |
1299 | case FILTER_EXPR: | |
1300 | case LABEL_DECL: | |
243cdfa8 | 1301 | case CONST_DECL: |
50674e96 DN |
1302 | case OMP_PARALLEL: |
1303 | case OMP_SECTIONS: | |
1304 | case OMP_FOR: | |
1305 | case OMP_RETURN_EXPR: | |
1306 | case OMP_SINGLE: | |
1307 | case OMP_MASTER: | |
1308 | case OMP_ORDERED: | |
1309 | case OMP_CRITICAL: | |
310de761 | 1310 | /* Expressions that make no memory references. */ |
6de9cd9a | 1311 | return; |
310de761 RH |
1312 | |
1313 | default: | |
6615c446 | 1314 | if (class == tcc_unary) |
310de761 | 1315 | goto do_unary; |
6615c446 | 1316 | if (class == tcc_binary || class == tcc_comparison) |
310de761 | 1317 | goto do_binary; |
6615c446 | 1318 | if (class == tcc_constant || class == tcc_type) |
310de761 | 1319 | return; |
6de9cd9a DN |
1320 | } |
1321 | ||
1322 | /* If we get here, something has gone wrong. */ | |
1e128c5f | 1323 | #ifdef ENABLE_CHECKING |
6de9cd9a DN |
1324 | fprintf (stderr, "unhandled expression in get_expr_operands():\n"); |
1325 | debug_tree (expr); | |
1326 | fputs ("\n", stderr); | |
1e128c5f GB |
1327 | internal_error ("internal error"); |
1328 | #endif | |
1329 | gcc_unreachable (); | |
6de9cd9a DN |
1330 | } |
1331 | ||
7c35745c | 1332 | |
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  Outputs
   become definitions, inputs become uses, and a "memory" clobber adds
   may-defs for all call-clobbered and addressable variables.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  /* Process the outputs: each is a definition; remember each parsed
     constraint because input parsing below may refer back to them.  */
  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
			       &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  /* Process the inputs: each is a plain use.  */
  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }


  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (global_var)
	  add_stmt_operand (&global_var, s_ann, opf_is_def);
	else
	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
	    {
	      tree var = referenced_var (i);

	      /* Subvars are explicitly represented in this list, so
		 we don't need the original to be added to the clobber
		 ops, but the original *will* be in this list because
		 we keep the addressability of the original
		 variable up-to-date so we don't screw up the rest of
		 the backend.  */
	      if (var_can_have_subvars (var)
		  && get_subvars_for_var (var) != NULL)
		continue;

	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* A single "memory" clobber is enough; stop scanning.  */
	break;
      }
}
1428 | ||
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.  FLAGS is as in get_expr_operands.
   FULL_REF contains the full pointer dereference expression, if we
   have it, or NULL otherwise.
   OFFSET and SIZE are the location of the access inside the
   dereferenced pointer, if known.
   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags,
			   tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its type memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  if (v_ann->type_mem_tag)
	    add_virtual_operand (v_ann->type_mem_tag, s_ann, flags,
				 full_ref, offset, size, false);
	}
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_none);
}
1517 | ||
ac182688 ZD |
1518 | /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */ |
1519 | ||
1520 | static void | |
1521 | get_tmr_operands (tree stmt, tree expr, int flags) | |
1522 | { | |
9be7ee44 ZD |
1523 | tree tag = TMR_TAG (expr), ref; |
1524 | HOST_WIDE_INT offset, size, maxsize; | |
1525 | subvar_t svars, sv; | |
1526 | stmt_ann_t s_ann = stmt_ann (stmt); | |
ac182688 ZD |
1527 | |
1528 | /* First record the real operands. */ | |
1529 | get_expr_operands (stmt, &TMR_BASE (expr), opf_none); | |
1530 | get_expr_operands (stmt, &TMR_INDEX (expr), opf_none); | |
1531 | ||
1532 | /* MEM_REFs should never be killing. */ | |
1533 | flags &= ~opf_kill_def; | |
1534 | ||
1535 | if (TMR_SYMBOL (expr)) | |
e8ca4159 DN |
1536 | { |
1537 | stmt_ann_t ann = stmt_ann (stmt); | |
1538 | add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken); | |
1539 | } | |
ac182688 | 1540 | |
9be7ee44 ZD |
1541 | if (!tag) |
1542 | { | |
1543 | /* Something weird, so ensure that we will be careful. */ | |
1544 | stmt_ann (stmt)->has_volatile_ops = true; | |
1545 | return; | |
1546 | } | |
1547 | ||
1548 | if (DECL_P (tag)) | |
1549 | { | |
1550 | get_expr_operands (stmt, &tag, flags); | |
1551 | return; | |
1552 | } | |
1553 | ||
1554 | ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize); | |
1555 | gcc_assert (ref != NULL_TREE); | |
1556 | svars = get_subvars_for_var (ref); | |
1557 | for (sv = svars; sv; sv = sv->next) | |
1558 | { | |
1559 | bool exact; | |
3c0b6c43 | 1560 | if (overlap_subvar (offset, maxsize, sv->var, &exact)) |
9be7ee44 ZD |
1561 | { |
1562 | int subvar_flags = flags; | |
1563 | if (!exact || size != maxsize) | |
1564 | subvar_flags &= ~opf_kill_def; | |
1565 | add_stmt_operand (&sv->var, s_ann, subvar_flags); | |
1566 | } | |
1567 | } | |
ac182688 ZD |
1568 | } |
1569 | ||
310de761 RH |
/* A subroutine of get_expr_operands to handle CALL_EXPR.

   STMT is the statement containing the call, EXPR the CALL_EXPR itself.
   Adds virtual operands modeling the call's global effects, then scans
   the callee expression, each argument, and the static chain.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
          && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
        add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  /* Scan every argument in the argument list.  */
  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  /* Scan the static chain, if any.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
1611 | ||
3c0b6c43 DB |
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.
   Return true if the access may touch ALIAS, false if it provably
   cannot.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
                           HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like
     struct gcc_target
     {
       struct asm_out
       {
         const char *byte_op;
         struct asm_int_op
         {
           const char *hi;
         } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     IE given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:
     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
         abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
           && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
           && !POINTER_TYPE_P (TREE_TYPE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
           && flag_strict_aliasing
           && TREE_CODE (ref) != INDIRECT_REF
           && !MTAG_P (alias)
           && !POINTER_TYPE_P (TREE_TYPE (alias))
           && offsetgtz
           && DECL_SIZE (alias)
           && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
           && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* No pruning rule applies; conservatively assume the access may
     touch ALIAS.  */
  return true;
}
6de9cd9a | 1742 | |
faf7c678 | 1743 | |
3c0b6c43 DB |
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  FOR_CLOBBER is true if this is
   adding a virtual operand for a call clobber.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
                     tree full_ref, HOST_WIDE_INT offset,
                     HOST_WIDE_INT size, bool for_clobber)
{
  VEC(tree,gc) *aliases;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     modify_expr, then we can't suppress the V_DEF, lest we run into
     validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    flags &= ~(opf_is_def | opf_kill_def);

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  aliases = v_ann->may_aliases;
  if (aliases == NULL)
    {
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_is_def)
        {
          if (flags & opf_kill_def)
            {
              /* Only regular variables or struct fields may get a
                 V_MUST_DEF operand.  */
              gcc_assert (!MTAG_P (var)
                          || TREE_CODE (var) == STRUCT_FIELD_TAG);
              /* V_MUST_DEF for non-aliased, non-GIMPLE register
                 variable definitions.  */
              append_v_must_def (var);
            }
          else
            {
              /* Add a V_MAY_DEF for call-clobbered variables and
                 memory tags.  */
              append_v_may_def (var);
            }
        }
      else
        append_vuse (var);
    }
  else
    {
      unsigned i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
         operands.  */
      gcc_assert (VEC_length (tree, aliases) != 0);

      if (flags & opf_is_def)
        {
          bool none_added = true;

          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              /* Skip aliases the access provably cannot touch.  */
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;

              none_added = false;
              append_v_may_def (al);
            }

          /* If the variable is also an alias tag, add a virtual
             operand for it, otherwise we will miss representing
             references to the members of the variable's alias set.
             This fixes the bug in gcc.c-torture/execute/20020503-1.c.

             It is also necessary to add bare defs on clobbers for
             TMT's, so that bare TMT uses caused by pruning all the
             aliases will link up properly with calls.  */
          if (v_ann->is_aliased
              || none_added
              || (TREE_CODE (var) == TYPE_MEMORY_TAG && for_clobber))
            append_v_may_def (var);
        }
      else
        {
          bool none_added = true;
          for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
            {
              if (!access_can_touch_variable (full_ref, al, offset, size))
                continue;
              none_added = false;
              append_vuse (al);
            }

          /* Similarly, append a virtual uses for VAR itself, when
             it is an alias tag.  */
          if (v_ann->is_aliased || none_added)
            append_vuse (var);
        }
    }
}
1871 | ||
faf7c678 DN |
1872 | |
1873 | /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in | |
3c0b6c43 DB |
1874 | get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to |
1875 | the statement's real operands, otherwise it is added to virtual | |
1876 | operands. */ | |
1877 | ||
1878 | static void | |
1879 | add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags) | |
1880 | { | |
1881 | bool is_real_op; | |
1882 | tree var, sym; | |
1883 | var_ann_t v_ann; | |
1884 | ||
1885 | var = *var_p; | |
1886 | gcc_assert (SSA_VAR_P (var)); | |
1887 | ||
1888 | is_real_op = is_gimple_reg (var); | |
faf7c678 DN |
1889 | |
1890 | /* If this is a real operand, the operand is either an SSA name or a | |
1891 | decl. Virtual operands may only be decls. */ | |
3c0b6c43 DB |
1892 | gcc_assert (is_real_op || DECL_P (var)); |
1893 | ||
1894 | sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var); | |
1895 | v_ann = var_ann (sym); | |
1896 | ||
1897 | /* Mark statements with volatile operands. Optimizers should back | |
1898 | off from statements having volatile operands. */ | |
1899 | if (TREE_THIS_VOLATILE (sym) && s_ann) | |
1900 | s_ann->has_volatile_ops = true; | |
1901 | ||
1902 | if (is_real_op) | |
1903 | { | |
1904 | /* The variable is a GIMPLE register. Add it to real operands. */ | |
1905 | if (flags & opf_is_def) | |
1906 | append_def (var_p); | |
1907 | else | |
1908 | append_use (var_p); | |
1909 | } | |
1910 | else | |
1911 | add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false); | |
1912 | } | |
1913 | ||
e8ca4159 DN |
1914 | /* Add the base address of REF to the set *ADDRESSES_TAKEN. If |
1915 | *ADDRESSES_TAKEN is NULL, a new set is created. REF may be | |
1916 | a single variable whose address has been taken or any other valid | |
1917 | GIMPLE memory reference (structure reference, array, etc). If the | |
1918 | base address of REF is a decl that has sub-variables, also add all | |
1919 | of its sub-variables. */ | |
6de9cd9a | 1920 | |
e8ca4159 DN |
1921 | void |
1922 | add_to_addressable_set (tree ref, bitmap *addresses_taken) | |
6de9cd9a | 1923 | { |
e8ca4159 | 1924 | tree var; |
c75ab022 | 1925 | subvar_t svars; |
c75ab022 | 1926 | |
e8ca4159 DN |
1927 | gcc_assert (addresses_taken); |
1928 | ||
23e66a36 | 1929 | /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF |
e8ca4159 DN |
1930 | as the only thing we take the address of. If VAR is a structure, |
1931 | taking the address of a field means that the whole structure may | |
1932 | be referenced using pointer arithmetic. See PR 21407 and the | |
1933 | ensuing mailing list discussion. */ | |
1934 | var = get_base_address (ref); | |
6de9cd9a DN |
1935 | if (var && SSA_VAR_P (var)) |
1936 | { | |
e8ca4159 DN |
1937 | if (*addresses_taken == NULL) |
1938 | *addresses_taken = BITMAP_GGC_ALLOC (); | |
c75ab022 | 1939 | |
c75ab022 DB |
1940 | if (var_can_have_subvars (var) |
1941 | && (svars = get_subvars_for_var (var))) | |
1942 | { | |
1943 | subvar_t sv; | |
1944 | for (sv = svars; sv; sv = sv->next) | |
e8ca4159 DN |
1945 | { |
1946 | bitmap_set_bit (*addresses_taken, DECL_UID (sv->var)); | |
1947 | TREE_ADDRESSABLE (sv->var) = 1; | |
1948 | } | |
c75ab022 | 1949 | } |
9044951e | 1950 | else |
e8ca4159 DN |
1951 | { |
1952 | bitmap_set_bit (*addresses_taken, DECL_UID (var)); | |
1953 | TREE_ADDRESSABLE (var) = 1; | |
1954 | } | |
6de9cd9a DN |
1955 | } |
1956 | } | |
1957 | ||
6de9cd9a DN |
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  STMT is the call statement;
   CALLEE is the called function's decl, or NULL if unknown.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
                                            DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
                                                  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
         clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
        {
          tree call = get_call_expr_in (stmt);
          if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
            {
              /* A pure/const call may read the variable but cannot
                 clobber it; record a plain use.  */
              add_stmt_operand (&var, s_ann, opf_none);
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
          else
            {
              clobber_stats.unescapable_clobbers_avoided++;
              continue;
            }
        }

      if (not_written)
        {
          clobber_stats.static_write_clobbers_avoided++;
          if (!not_read)
            add_stmt_operand (&var, s_ann, opf_none);
          else
            clobber_stats.static_read_clobbers_avoided++;
        }
      else
        /* The variable may really be clobbered; add a V_MAY_DEF,
           marked as coming from a call clobber.  */
        add_virtual_operand (var, s_ann, opf_is_def,
                             NULL, 0, -1, true);
    }
}
2045 | ||
2046 | ||
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  STMT is the call statement; CALLEE is the called function's
   decl, or NULL if unknown.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
         subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
        real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
                                            DECL_UID (real_var)) : false;

      if (not_read)
        {
          /* IPA analysis proved the callee never reads this static;
             skip the VUSE entirely.  */
          clobber_stats.static_readonly_clobbers_avoided++;
          continue;
        }

      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
    }
}
2096 | ||
1a24f92f | 2097 | |
f430bae8 AM |
/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem, FALSE otherwise.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* The list root is the node embedded in the SSA_NAME; it never
     carries a real use pointer.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* An unlinked root must have no successor either.  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Walk forward, checking that each node's PREV points back to the
     node we just came from, and that each use really refers to VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error;	/* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  /* Both traversals must visit the same number of nodes.  */
  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf(f, "\n");
  return true;
}
2166 | ||
2167 | ||
2168 | /* Dump all the immediate uses to FILE. */ | |
2169 | ||
2170 | void | |
2171 | dump_immediate_uses_for (FILE *file, tree var) | |
2172 | { | |
2173 | imm_use_iterator iter; | |
2174 | use_operand_p use_p; | |
2175 | ||
2176 | gcc_assert (var && TREE_CODE (var) == SSA_NAME); | |
2177 | ||
2178 | print_generic_expr (file, var, TDF_SLIM); | |
2179 | fprintf (file, " : -->"); | |
2180 | if (has_zero_uses (var)) | |
2181 | fprintf (file, " no uses.\n"); | |
2182 | else | |
2183 | if (has_single_use (var)) | |
2184 | fprintf (file, " single use.\n"); | |
2185 | else | |
2186 | fprintf (file, "%d uses.\n", num_imm_uses (var)); | |
2187 | ||
2188 | FOR_EACH_IMM_USE_FAST (use_p, iter, var) | |
2189 | { | |
f47c96aa AM |
2190 | if (!is_gimple_reg (USE_FROM_PTR (use_p))) |
2191 | print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS); | |
2192 | else | |
2193 | print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM); | |
f430bae8 AM |
2194 | } |
2195 | fprintf(file, "\n"); | |
2196 | } | |
2197 | ||
2198 | /* Dump all the immediate uses to FILE. */ | |
2199 | ||
2200 | void | |
2201 | dump_immediate_uses (FILE *file) | |
2202 | { | |
2203 | tree var; | |
2204 | unsigned int x; | |
2205 | ||
2206 | fprintf (file, "Immediate_uses: \n\n"); | |
2207 | for (x = 1; x < num_ssa_names; x++) | |
2208 | { | |
2209 | var = ssa_name(x); | |
2210 | if (!var) | |
2211 | continue; | |
2212 | dump_immediate_uses_for (file, var); | |
2213 | } | |
2214 | } | |
2215 | ||
2216 | ||
/* Dump the immediate uses of every SSA name to stderr.
   Convenience wrapper for use from a debugger.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}
2224 | ||
/* Dump the immediate uses of VAR to stderr.
   Convenience wrapper for use from a debugger.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
6de9cd9a | 2232 | #include "gt-tree-ssa-operands.h" |