6de9cd9a | 1 | /* SSA operands management for trees. |
b5b8b0ac | 2 | Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 |
726a989a | 3 | Free Software Foundation, Inc. |
6de9cd9a DN |
4 | |
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9dcd6f09 | 9 | the Free Software Foundation; either version 3, or (at your option) |
6de9cd9a DN |
10 | any later version. |
11 | ||
12 | GCC is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
18 | along with GCC; see the file COPYING3. If not see |
19 | <http://www.gnu.org/licenses/>. */ | |
6de9cd9a DN |
20 | |
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tm.h" | |
25 | #include "tree.h" | |
26 | #include "flags.h" | |
27 | #include "function.h" | |
28 | #include "diagnostic.h" | |
29 | #include "tree-flow.h" | |
30 | #include "tree-inline.h" | |
31 | #include "tree-pass.h" | |
32 | #include "ggc.h" | |
33 | #include "timevar.h" | |
4c714dd4 | 34 | #include "toplev.h" |
6674a6ce | 35 | #include "langhooks.h" |
ea900239 | 36 | #include "ipa-reference.h" |
1a24f92f | 37 | |
b8698a0f L |
38 | /* This file contains the code required to manage the operands cache of the |
39 | SSA optimizer. For every stmt, we maintain an operand cache in the stmt | |
40 | annotation. This cache contains operands that will be of interest to | |
41 | optimizers and other passes wishing to manipulate the IL. | |
1a24f92f | 42 | |
b8698a0f L |
43 | The operand type are broken up into REAL and VIRTUAL operands. The real |
44 | operands are represented as pointers into the stmt's operand tree. Thus | |
1a24f92f | 45 | any manipulation of the real operands will be reflected in the actual tree. |
b8698a0f L |
46 | Virtual operands are represented solely in the cache, although the base |
47 | variable for the SSA_NAME may, or may not occur in the stmt's tree. | |
1a24f92f AM |
48 | Manipulation of the virtual operands will not be reflected in the stmt tree. |
49 | ||
b8698a0f | 50 | The routines in this file are concerned with creating this operand cache |
1a24f92f AM |
51 | from a stmt tree. |
52 | ||
b8698a0f L |
53 | The operand tree is the parsed by the various get_* routines which look |
54 | through the stmt tree for the occurrence of operands which may be of | |
55 | interest, and calls are made to the append_* routines whenever one is | |
56 | found. There are 4 of these routines, each representing one of the | |
38635499 | 57 | 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs. |
1a24f92f | 58 | |
b8698a0f | 59 | The append_* routines check for duplication, and simply keep a list of |
1a24f92f AM |
60 | unique objects for each operand type in the build_* extendable vectors. |
61 | ||
b8698a0f L |
62 | Once the stmt tree is completely parsed, the finalize_ssa_operands() |
63 | routine is called, which proceeds to perform the finalization routine | |
38635499 | 64 | on each of the 4 operand vectors which have been built up. |
1a24f92f | 65 | |
b8698a0f L |
66 | If the stmt had a previous operand cache, the finalization routines |
67 | attempt to match up the new operands with the old ones. If it's a perfect | |
68 | match, the old vector is simply reused. If it isn't a perfect match, then | |
69 | a new vector is created and the new operands are placed there. For | |
70 | virtual operands, if the previous cache had SSA_NAME version of a | |
71 | variable, and that same variable occurs in the same operands cache, then | |
1a24f92f AM |
72 | the new cache vector will also get the same SSA_NAME. |
73 | ||
28f6b1e4 DN |
74 | i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new |
75 | operand vector for VUSE, then the new vector will also be modified | |
76 | such that it contains 'a_5' rather than 'a'. */ | |
1a24f92f | 77 | |
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  Dumped by fini_ssa_operands when TDF_STATS
   is requested.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
105 | ||
106 | ||
/* Flags to describe operand properties in helpers.  These are OR'ed
   together and threaded through the get_*_operands routines.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)
0d2bf6f0 | 128 | |
/* Array for building all the def operands.  Shared by all functions
   being compiled; reset between statements by cleanup_build_arrays.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  At most one per statement.  */
static tree build_vdef;

/* The built VUSE operand.  At most one per statement.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  Used to decide
   when the shared build vectors above may be allocated/released.  */
static int n_initialized = 0;
1a24f92f | 149 | |
c83eecad | 150 | /* Return the DECL_UID of the base variable of T. */ |
1a24f92f | 151 | |
f47c96aa | 152 | static inline unsigned |
ed7a4b4b | 153 | get_name_decl (const_tree t) |
6de9cd9a | 154 | { |
f3940b0e AM |
155 | if (TREE_CODE (t) != SSA_NAME) |
156 | return DECL_UID (t); | |
157 | else | |
158 | return DECL_UID (SSA_NAME_VAR (t)); | |
6de9cd9a DN |
159 | } |
160 | ||
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes), return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
6de9cd9a | 176 | |
b8698a0f | 177 | |
/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands FUD chain.  */

static void
create_vop_var (void)
{
  tree global_var;

  /* Each function gets exactly one .MEM variable; it must not exist yet.  */
  gcc_assert (cfun->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  /* Set up the decl as an artificial, used, external static so it is
     never considered a local GIMPLE register.  */
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;

  create_var_ann (global_var);
  add_referenced_var (global_var);
  cfun->gimple_df->vop = global_var;
}
79f99d42 | 204 | |
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  The "- sizeof (void *)" accounts for the chunk's NEXT pointer
   so that a whole chunk fits in a power-of-two allocation.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
79f99d42 | 216 | |
/* Initialize the operand cache routines.  Must be paired with a later
   call to fini_ssa_operands for the same function.  */

void
init_ssa_operands (void)
{
  /* The build vectors and the bitmap obstack are shared by all
     functions; only the first caller allocates them.  */
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  /* Setting the index to the current chunk size forces the first
     ssa_operand_alloc to grab a fresh chunk.  */
  gimple_ssa_operands (cfun)->operand_memory_index
     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var ();
}
6de9cd9a | 239 | |
1a24f92f | 240 | |
/* Dispose of anything required by the operand routines.  Releases the
   per-function operand memory and, for the last function, the shared
   build vectors and bitmap obstack.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  /* The last caller tears down the shared build state.  */
  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  /* The free lists point into operand_memory, which is freed below,
     so they must be cleared first.  */
  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}
1a24f92f | 289 | |
6de9cd9a | 290 | |
/* Return memory for an operand of size SIZE.  Operands are carved out
   of GC-allocated chunks whose size grows (OP_SIZE_1 -> _2 -> _3) as
   demand increases; individual operands are never freed back to the
   chunk, only recycled via the free_defs/free_uses lists.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  /* Only the two operand node types are ever allocated here.  */
  gcc_assert (size == sizeof (struct use_optype_d)
	      || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      /* Grow the chunk size one step, saturating at OP_SIZE_3.  */
      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* sizeof (void *) covers the chunk's NEXT pointer.  */
      ptr = (struct ssa_operand_memory_d *)
	      ggc_alloc (sizeof (void *)
			 + gimple_ssa_operands (cfun)->ssa_operand_mem_size);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  /* Bump-allocate from the current chunk.  */
  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
335 | ||
1a24f92f | 336 | |
79f99d42 AM |
337 | /* Allocate a DEF operand. */ |
338 | ||
38635499 DN |
339 | static inline struct def_optype_d * |
340 | alloc_def (void) | |
341 | { | |
342 | struct def_optype_d *ret; | |
343 | if (gimple_ssa_operands (cfun)->free_defs) | |
344 | { | |
345 | ret = gimple_ssa_operands (cfun)->free_defs; | |
346 | gimple_ssa_operands (cfun)->free_defs | |
347 | = gimple_ssa_operands (cfun)->free_defs->next; | |
348 | } | |
349 | else | |
350 | ret = (struct def_optype_d *) | |
79f99d42 | 351 | ssa_operand_alloc (sizeof (struct def_optype_d)); |
38635499 DN |
352 | return ret; |
353 | } | |
354 | ||
355 | ||
79f99d42 AM |
356 | /* Allocate a USE operand. */ |
357 | ||
38635499 DN |
358 | static inline struct use_optype_d * |
359 | alloc_use (void) | |
360 | { | |
361 | struct use_optype_d *ret; | |
362 | if (gimple_ssa_operands (cfun)->free_uses) | |
363 | { | |
364 | ret = gimple_ssa_operands (cfun)->free_uses; | |
365 | gimple_ssa_operands (cfun)->free_uses | |
366 | = gimple_ssa_operands (cfun)->free_uses->next; | |
367 | } | |
368 | else | |
79f99d42 AM |
369 | ret = (struct use_optype_d *) |
370 | ssa_operand_alloc (sizeof (struct use_optype_d)); | |
38635499 DN |
371 | return ret; |
372 | } | |
373 | ||
374 | ||
79f99d42 | 375 | /* Adds OP to the list of defs after LAST. */ |
5dc2e333 | 376 | |
b8698a0f | 377 | static inline def_optype_p |
79f99d42 | 378 | add_def_op (tree *op, def_optype_p last) |
ac574e1b | 379 | { |
c22940cd | 380 | def_optype_p new_def; |
ac574e1b | 381 | |
c22940cd TN |
382 | new_def = alloc_def (); |
383 | DEF_OP_PTR (new_def) = op; | |
384 | last->next = new_def; | |
385 | new_def->next = NULL; | |
386 | return new_def; | |
ac574e1b ZD |
387 | } |
388 | ||
79f99d42 AM |
389 | |
390 | /* Adds OP to the list of uses of statement STMT after LAST. */ | |
ac574e1b | 391 | |
38635499 | 392 | static inline use_optype_p |
726a989a | 393 | add_use_op (gimple stmt, tree *op, use_optype_p last) |
ac574e1b | 394 | { |
c22940cd TN |
395 | use_optype_p new_use; |
396 | ||
397 | new_use = alloc_use (); | |
398 | USE_OP_PTR (new_use)->use = op; | |
399 | link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt); | |
400 | last->next = new_use; | |
401 | new_use->next = NULL; | |
402 | return new_use; | |
ac574e1b ZD |
403 | } |
404 | ||
ac574e1b | 405 | |
ac574e1b | 406 | |
/* Takes elements from build_defs and turns them into def operands of STMT.
   Also reconciles the statement's VDEF with the one built up in
   build_vdef: a new VDEF is pre-pended to the def list, and a stale one
   is unlinked and released.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      /* Compare against the base variable of an existing SSA_NAME VDEF
	 so an already-renamed VDEF of the same symbol is kept.  */
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}
f47c96aa | 477 | |
6de9cd9a | 478 | |
/* Takes elements from build_uses and turns them into use operands of STMT.
   Also reconciles the statement's VUSE with the one built up in
   build_vuse.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      /* Compare against the base variable of an existing SSA_NAME VUSE
	 so an already-renamed VUSE of the same symbol is kept.  */
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  Every old use must
     first be removed from its immediate-use chain.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
1a24f92f | 539 | |
38635499 DN |
540 | |
541 | /* Clear the in_list bits and empty the build array for VDEFs and | |
542 | VUSEs. */ | |
ac574e1b ZD |
543 | |
544 | static inline void | |
38635499 | 545 | cleanup_build_arrays (void) |
ac574e1b | 546 | { |
5006671f RG |
547 | build_vdef = NULL_TREE; |
548 | build_vuse = NULL_TREE; | |
38635499 DN |
549 | VEC_truncate (tree, build_defs, 0); |
550 | VEC_truncate (tree, build_uses, 0); | |
a32b97a2 BB |
551 | } |
552 | ||
6de9cd9a | 553 | |
/* Finalize all the build vectors, fill the new ones into INFO.  The
   build arrays are cleared afterwards so the next statement starts
   from an empty state.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}
563 | ||
564 | ||
/* Start the process of building up operands vectors in INFO.  Verifies
   that the previous statement's build state was fully consumed.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}
575 | ||
576 | ||
/* Add DEF_P to the list of pointers to operands.  The pointer is
   stashed in the tree vector via a cast; finalize_ssa_defs casts it
   back.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
584 | ||
585 | ||
/* Add USE_P to the list of pointers to operands.  The pointer is
   stashed in the tree vector via a cast; finalize_ssa_uses casts it
   back.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
593 | ||
594 | ||
/* Add VAR to the set of variables that require a VDEF operator.  Also
   records VAR as the VUSE, since a store to memory also depends on the
   previous memory state.  */

static inline void
append_vdef (tree var)
{
  /* Virtual operands are only tracked when optimizing.  */
  if (!optimize)
    return;

  /* At most one virtual operand per statement, and it must agree with
     anything recorded earlier for this statement.  */
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}
611 | ||
612 | ||
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  /* Virtual operands are only tracked when optimizing.  */
  if (!optimize)
    return;

  /* Must agree with any VUSE recorded earlier for this statement.  */
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}
626 | ||
/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands:
   opf_def requests a VDEF, otherwise a VUSE is added.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  /* Debug statements must never get virtual operands.  */
  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}
645 | ||
f47c96aa | 646 | |
/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  /* Volatility lives on the underlying symbol, not the SSA_NAME.  */
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}
f47c96aa | 677 | |
/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  /* get_base_address may return a non-decl (e.g. for an indirect
     reference); only decls can be marked addressable.  */
  if (var && DECL_P (var))
    TREE_ADDRESSABLE (var) = 1;
}
697 | ||
28f6b1e4 | 698 | |
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  The pointer
     itself is always a use, even when the reference is a store, so only
     opf_no_vops is propagated from FLAGS.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr,
		       opf_use | (flags & opf_no_vops));
}
643519b7 | 728 | |
28f6b1e4 | 729 | |
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  The base and index are uses
     regardless of whether the reference itself is a load or a store.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));

  if (TMR_SYMBOL (expr))
    mark_address_taken (TMR_SYMBOL (expr));

  add_virtual_operand (stmt, flags);
}
747 | ||
748 | ||
/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_virtual_operand (stmt, opf_def);
      /* A pure (but not const) call still reads memory.  */
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}
771 | ||
772 | ||
/* Scan operands in the ASM_EXPR stmt referred to in INFO.
   Outputs are scanned as definitions, inputs as uses, and a "memory"
   clobber forces a virtual definition on the statement.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  /* Stack-allocated scratch array of the output constraint strings;
     parse_input_constraint needs them to resolve matching constraints.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    mark_address_taken (t);
	}

      /* The output itself is written by the asm.  */
      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t))
	    mark_address_taken (t);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree link = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
	{
	  /* One "memory" clobber is enough; no need to scan the rest.  */
	  add_virtual_operand (stmt, opf_def);
	  break;
	}
    }
}
841 | ||
842 | ||
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  /* Flags used for rvalue sub-operands (array indices, offsets, ...).  */
  int uflags = opf_use;

  if (expr == NULL)
    return;

  /* Debug statements never create virtual operands.  */
  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if (!is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* Bare decls become real or virtual operands directly.  */
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      /* Only valid inside the value of a debug bind.  */
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MISALIGNED_INDIRECT_REF:
      /* Operand 1 is the misalignment amount; scan it, then handle the
	 pointer dereference like any other INDIRECT_REF.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	/* The base object inherits the def/use flags of the whole
	   reference.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    /* Operand 2 is the optional field offset.  */
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    /* Index, lower bound and element size are rvalue uses.  */
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Condition and both arms are all rvalue uses.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	/* Ternary operations: scan all three operands.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      /* Dispatch remaining codes by their tree-code class.  */
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
1028 | ||
643519b7 | 1029 | |
65ad7c63 DN |
1030 | /* Parse STMT looking for operands. When finished, the various |
1031 | build_* operand vectors will have potential operands in them. */ | |
1032 | ||
ac182688 | 1033 | static void |
726a989a | 1034 | parse_ssa_operands (gimple stmt) |
ac182688 | 1035 | { |
726a989a | 1036 | enum gimple_code code = gimple_code (stmt); |
ac182688 | 1037 | |
726a989a RB |
1038 | if (code == GIMPLE_ASM) |
1039 | get_asm_expr_operands (stmt); | |
b5b8b0ac AO |
1040 | else if (is_gimple_debug (stmt)) |
1041 | { | |
1042 | if (gimple_debug_bind_p (stmt) | |
1043 | && gimple_debug_bind_has_value_p (stmt)) | |
1044 | get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt), | |
1045 | opf_use | opf_no_vops); | |
1046 | } | |
726a989a | 1047 | else |
02075bb2 | 1048 | { |
726a989a | 1049 | size_t i, start = 0; |
02075bb2 | 1050 | |
726a989a RB |
1051 | if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL) |
1052 | { | |
1053 | get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def); | |
1054 | start = 1; | |
1055 | } | |
02075bb2 | 1056 | |
726a989a RB |
1057 | for (i = start; i < gimple_num_ops (stmt); i++) |
1058 | get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use); | |
02075bb2 | 1059 | |
726a989a RB |
1060 | /* Add call-clobbered operands, if needed. */ |
1061 | if (code == GIMPLE_CALL) | |
5006671f | 1062 | maybe_add_call_vops (stmt); |
9be7ee44 | 1063 | } |
ac182688 ZD |
1064 | } |
1065 | ||
643519b7 | 1066 | |
02075bb2 | 1067 | /* Create an operands cache for STMT. */ |
310de761 RH |
1068 | |
1069 | static void | |
726a989a | 1070 | build_ssa_operands (gimple stmt) |
310de761 | 1071 | { |
ccacdf06 | 1072 | /* Initially assume that the statement has no volatile operands. */ |
726a989a | 1073 | gimple_set_has_volatile_ops (stmt, false); |
726a989a | 1074 | |
02075bb2 | 1075 | start_ssa_stmt_operands (); |
02075bb2 | 1076 | parse_ssa_operands (stmt); |
02075bb2 DN |
1077 | finalize_ssa_stmt_operands (stmt); |
1078 | } | |
e288e2f5 | 1079 | |
28f6b1e4 | 1080 | |
/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (defs)
    {
      /* Walk to the tail of the def list so the whole chain can be
	 prepended to the freelist in one splice.  */
      for (last_def = defs; last_def->next; last_def = last_def->next)
	continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      /* Delink every use from its SSA name's immediate-use list.  The
	 loop delinks all but the last node; the statement after the
	 loop delinks the final one (last_use stops on it).  */
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      /* Splice the whole chain onto the use freelist.  */
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  /* Virtual operands are plain trees; just drop the references.  */
  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}
1115 | ||
3c0b6c43 | 1116 | |
2434ab1d | 1117 | /* Get the operands of statement STMT. */ |
643519b7 | 1118 | |
02075bb2 | 1119 | void |
726a989a | 1120 | update_stmt_operands (gimple stmt) |
02075bb2 | 1121 | { |
65ad7c63 DN |
1122 | /* If update_stmt_operands is called before SSA is initialized, do |
1123 | nothing. */ | |
02075bb2 DN |
1124 | if (!ssa_operands_active ()) |
1125 | return; | |
943261d7 | 1126 | |
02075bb2 | 1127 | timevar_push (TV_TREE_OPS); |
943261d7 | 1128 | |
726a989a | 1129 | gcc_assert (gimple_modified_p (stmt)); |
02075bb2 | 1130 | build_ssa_operands (stmt); |
726a989a | 1131 | gimple_set_modified (stmt, false); |
6de9cd9a | 1132 | |
02075bb2 DN |
1133 | timevar_pop (TV_TREE_OPS); |
1134 | } | |
faf7c678 | 1135 | |
65ad7c63 | 1136 | |
02075bb2 DN |
1137 | /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done |
1138 | to test the validity of the swap operation. */ | |
faf7c678 | 1139 | |
02075bb2 | 1140 | void |
726a989a | 1141 | swap_tree_operands (gimple stmt, tree *exp0, tree *exp1) |
02075bb2 DN |
1142 | { |
1143 | tree op0, op1; | |
1144 | op0 = *exp0; | |
1145 | op1 = *exp1; | |
3c0b6c43 | 1146 | |
65ad7c63 DN |
1147 | /* If the operand cache is active, attempt to preserve the relative |
1148 | positions of these two operands in their respective immediate use | |
1149 | lists. */ | |
02075bb2 DN |
1150 | if (ssa_operands_active () && op0 != op1) |
1151 | { | |
1152 | use_optype_p use0, use1, ptr; | |
1153 | use0 = use1 = NULL; | |
3c0b6c43 | 1154 | |
02075bb2 | 1155 | /* Find the 2 operands in the cache, if they are there. */ |
726a989a | 1156 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
02075bb2 DN |
1157 | if (USE_OP_PTR (ptr)->use == exp0) |
1158 | { | |
1159 | use0 = ptr; | |
1160 | break; | |
1161 | } | |
3c0b6c43 | 1162 | |
726a989a | 1163 | for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next) |
02075bb2 DN |
1164 | if (USE_OP_PTR (ptr)->use == exp1) |
1165 | { | |
1166 | use1 = ptr; | |
1167 | break; | |
1168 | } | |
1169 | ||
1170 | /* If both uses don't have operand entries, there isn't much we can do | |
65ad7c63 | 1171 | at this point. Presumably we don't need to worry about it. */ |
02075bb2 DN |
1172 | if (use0 && use1) |
1173 | { | |
1174 | tree *tmp = USE_OP_PTR (use1)->use; | |
1175 | USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use; | |
1176 | USE_OP_PTR (use0)->use = tmp; | |
1177 | } | |
3c0b6c43 | 1178 | } |
02075bb2 DN |
1179 | |
1180 | /* Now swap the data. */ | |
1181 | *exp0 = op1; | |
1182 | *exp1 = op0; | |
3c0b6c43 DB |
1183 | } |
1184 | ||
726a989a | 1185 | |
/* Scan the immediate_use list for VAR making sure its linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* LIST is the sentinel node of VAR's circular immediate-use list;
     it never carries a real use.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* An unlinked sentinel must have both links NULL (no uses).  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Walk forward, checking back-links and that every node's use
     really points at VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      /* The backward walk must see exactly as many nodes as the
	 forward walk did.  */
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf(f, "\n");
  return true;
}
1256 | ||
1257 | ||
1258 | /* Dump all the immediate uses to FILE. */ | |
1259 | ||
1260 | void | |
1261 | dump_immediate_uses_for (FILE *file, tree var) | |
1262 | { | |
1263 | imm_use_iterator iter; | |
1264 | use_operand_p use_p; | |
1265 | ||
1266 | gcc_assert (var && TREE_CODE (var) == SSA_NAME); | |
1267 | ||
1268 | print_generic_expr (file, var, TDF_SLIM); | |
1269 | fprintf (file, " : -->"); | |
1270 | if (has_zero_uses (var)) | |
1271 | fprintf (file, " no uses.\n"); | |
1272 | else | |
1273 | if (has_single_use (var)) | |
1274 | fprintf (file, " single use.\n"); | |
1275 | else | |
1276 | fprintf (file, "%d uses.\n", num_imm_uses (var)); | |
1277 | ||
1278 | FOR_EACH_IMM_USE_FAST (use_p, iter, var) | |
1279 | { | |
726a989a | 1280 | if (use_p->loc.stmt == NULL && use_p->use == NULL) |
afd83fe4 | 1281 | fprintf (file, "***end of stmt iterator marker***\n"); |
f47c96aa | 1282 | else |
afd83fe4 | 1283 | if (!is_gimple_reg (USE_FROM_PTR (use_p))) |
726a989a | 1284 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS); |
afd83fe4 | 1285 | else |
726a989a | 1286 | print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM); |
f430bae8 AM |
1287 | } |
1288 | fprintf(file, "\n"); | |
1289 | } | |
1290 | ||
643519b7 | 1291 | |
f430bae8 AM |
1292 | /* Dump all the immediate uses to FILE. */ |
1293 | ||
1294 | void | |
1295 | dump_immediate_uses (FILE *file) | |
1296 | { | |
1297 | tree var; | |
1298 | unsigned int x; | |
1299 | ||
1300 | fprintf (file, "Immediate_uses: \n\n"); | |
1301 | for (x = 1; x < num_ssa_names; x++) | |
1302 | { | |
1303 | var = ssa_name(x); | |
1304 | if (!var) | |
1305 | continue; | |
1306 | dump_immediate_uses_for (file, var); | |
1307 | } | |
1308 | } | |
1309 | ||
1310 | ||
1311 | /* Dump def-use edges on stderr. */ | |
1312 | ||
1313 | void | |
1314 | debug_immediate_uses (void) | |
1315 | { | |
1316 | dump_immediate_uses (stderr); | |
1317 | } | |
1318 | ||
65ad7c63 | 1319 | |
f430bae8 AM |
1320 | /* Dump def-use edges on stderr. */ |
1321 | ||
1322 | void | |
1323 | debug_immediate_uses_for (tree var) | |
1324 | { | |
1325 | dump_immediate_uses_for (stderr, var); | |
1a24f92f | 1326 | } |
cfaab3a9 DN |
1327 | |
1328 | ||
5006671f RG |
1329 | /* Unlink STMTs virtual definition from the IL by propagating its use. */ |
1330 | ||
1331 | void | |
1332 | unlink_stmt_vdef (gimple stmt) | |
1333 | { | |
1334 | use_operand_p use_p; | |
1335 | imm_use_iterator iter; | |
1336 | gimple use_stmt; | |
1337 | tree vdef = gimple_vdef (stmt); | |
1338 | ||
1339 | if (!vdef | |
1340 | || TREE_CODE (vdef) != SSA_NAME) | |
1341 | return; | |
1342 | ||
1343 | FOR_EACH_IMM_USE_STMT (use_stmt, iter, gimple_vdef (stmt)) | |
1344 | { | |
1345 | FOR_EACH_IMM_USE_ON_STMT (use_p, iter) | |
1346 | SET_USE (use_p, gimple_vuse (stmt)); | |
1347 | } | |
cfaab3a9 | 1348 | |
5006671f RG |
1349 | if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt))) |
1350 | SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vuse (stmt)) = 1; | |
cfaab3a9 | 1351 | } |
5006671f | 1352 |