/* Data structure for the modref pass.
   Copyright (C) 2020-2022 Free Software Foundation, Inc.
   Contributed by David Cepelik and Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* modref_tree represents a decision tree that can be used by the alias
   analysis oracle to determine whether a given memory access can be affected
   by a function call.  For every function we collect two trees, one for loads
   and the other for stores.  The tree consists of the following levels:

   1) Base: this level represents the base alias set of the access and refers
      to sons (ref nodes).  The flag all_refs means that all possible
      references are aliasing.

      Because for LTO streaming we need to stream types rather than alias sets
      modref_base_node is implemented as a template.
   2) Ref: this level represents the ref alias set and links to accesses
      unless the all_refs flag is set.
      Again, ref is a template to allow LTO streaming.
   3) Access: this level represents info about individual accesses.  Presently
      we record whether the access is through a dereference of a function
      parameter and if so we record the access range.
*/

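/* Illustrative sketch (editorial, not part of GCC; the function f and struct s
   below are hypothetical).  For a function such as

     void f (struct s *p) { p->x = 1; }

   the store tree would conceptually contain a single path

     base node (alias set of *p)
       -> ref node (alias set of the accessed field x)
	 -> access node (parm_index 0, parm_offset 0, offset/size of x)

   while a function whose stores cannot be attributed to any parameter or
   alias set would instead collapse the corresponding level by setting
   every_base, every_ref or every_access.  */
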
#ifndef GCC_MODREF_TREE_H
#define GCC_MODREF_TREE_H

struct ipa_modref_summary;

/* Parm indexes greater than or equal to 0 are normal parms.
   Some negative values have special meaning.  */
enum modref_special_parms {
  MODREF_UNKNOWN_PARM = -1,
  MODREF_STATIC_CHAIN_PARM = -2,
  MODREF_RETSLOT_PARM = -3,
  /* Used for bases that point to memory that escapes from the function.  */
  MODREF_GLOBAL_MEMORY_PARM = -4,
  /* Used in modref_parm_map to mark references which can be removed
     from the summary during summary update since they now point to local
     memory.  */
  MODREF_LOCAL_MEMORY_PARM = -5
};

/* Modref records accesses relative to function parameters.
   This is the entry for a single access, specifying its base and access
   range.

   Accesses can be collected into bounded-size arrays using
   modref_access_node::insert.  */
struct GTY(()) modref_access_node
{
  /* Access range information (in bits).  */
  poly_int64 offset;
  poly_int64 size;
  poly_int64 max_size;

  /* Offset from parameter pointer to the base of the access (in bytes).  */
  poly_int64 parm_offset;

  /* Index of parameter which specifies the base of access.  -1 if base is not
     a function parameter.  */
  int parm_index;
  bool parm_offset_known;
  /* Number of times the interval was extended during dataflow.
     This has to be limited in order to keep dataflow finite.  */
  unsigned char adjustments;

  /* Return true if access node holds some useful info.  */
  bool useful_p () const
  {
    return parm_index != MODREF_UNKNOWN_PARM;
  }
  /* Return true if access can be used to determine a kill.  */
  bool useful_for_kill_p () const
  {
    return parm_offset_known && parm_index != MODREF_UNKNOWN_PARM
	   && parm_index != MODREF_GLOBAL_MEMORY_PARM
	   && parm_index != MODREF_RETSLOT_PARM && known_size_p (size)
	   && known_eq (max_size, size)
	   && known_gt (size, 0);
  }
  /* Dump range to debug OUT.  */
  void dump (FILE *out);
  /* Return true if both accesses are the same.  */
  bool operator == (modref_access_node &a) const;
  /* Return true if range info is useful.  */
  bool range_info_useful_p () const;
  /* Return tree corresponding to parameter of the range in STMT.  */
  tree get_call_arg (const gcall *stmt) const;
  /* Build ao_ref corresponding to the access and return true if successful.  */
  bool get_ao_ref (const gcall *stmt, class ao_ref *ref) const;
  /* Stream access to OB.  */
  void stream_out (struct output_block *ob) const;
  /* Stream access in from IB.  */
  static modref_access_node stream_in (struct lto_input_block *ib);
  /* Insert A into vector ACCESSES.  Limit size of the vector to MAX_ACCESSES
     and if RECORD_ADJUSTMENTS is true keep track of adjustment counts.
     Return 0 if nothing changed, 1 if insertion succeeded and -1 if it
     failed.  */
  static int insert (vec <modref_access_node, va_gc> *&accesses,
		     modref_access_node a, size_t max_accesses,
		     bool record_adjustments);
  /* Same as insert but for kills where we are conservative the other way
     around: if information is lost, the kill is lost.  */
  static bool insert_kill (vec<modref_access_node> &kills,
			   modref_access_node &a, bool record_adjustments);
private:
  bool contains (const modref_access_node &) const;
  bool contains_for_kills (const modref_access_node &) const;
  void update (poly_int64, poly_int64, poly_int64, poly_int64, bool);
  bool update_for_kills (poly_int64, poly_int64, poly_int64,
			 poly_int64, poly_int64, bool);
  bool merge (const modref_access_node &, bool);
  bool merge_for_kills (const modref_access_node &, bool);
  static bool closer_pair_p (const modref_access_node &,
			     const modref_access_node &,
			     const modref_access_node &,
			     const modref_access_node &);
  void forced_merge (const modref_access_node &, bool);
  void update2 (poly_int64, poly_int64, poly_int64, poly_int64,
		poly_int64, poly_int64, poly_int64, bool);
  bool combined_offsets (const modref_access_node &,
			 poly_int64 *, poly_int64 *, poly_int64 *) const;
  static void try_merge_with (vec <modref_access_node, va_gc> *&, size_t);
};
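
/* Editorial example (not part of GCC; the function foo below is
   hypothetical).  For a function foo (void *p) that performs a 4-byte store
   to *(int *)((char *)p + 8), the access node recorded in foo's store
   summary would roughly be

     parm_index = 0, parm_offset_known = true, parm_offset = 8 (bytes),
     offset = 0, size = 32, max_size = 32 (bits),

   i.e. the base of the access is 8 bytes past the first parameter and the
   access itself covers bits 0..31 of that base.  */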

/* Access node specifying no useful info.  */
const modref_access_node unspecified_modref_access_node
  = {0, -1, -1, 0, MODREF_UNKNOWN_PARM, false, 0};

template <typename T>
struct GTY((user)) modref_ref_node
{
  T ref;
  bool every_access;
  vec <modref_access_node, va_gc> *accesses;

  modref_ref_node (T ref):
    ref (ref),
    every_access (false),
    accesses (NULL)
  {}

  /* Collapse the tree.  */
  void collapse ()
  {
    vec_free (accesses);
    accesses = NULL;
    every_access = true;
  }

  /* Insert access A.
     Collapse the tree if it has more than MAX_ACCESSES entries.
     If RECORD_ADJUSTMENTS is true avoid too many interval extensions.
     Return true if the record was changed.  */
  bool insert_access (modref_access_node a, size_t max_accesses,
		      bool record_adjustments)
  {
    /* If this base->ref pair has no access information, bail out.  */
    if (every_access)
      return false;

    /* Only the following kinds of parameters need to be tracked.
       We do not track return slots because they are seen as a direct store
       in the caller.  */
    gcc_checking_assert (a.parm_index >= 0
			 || a.parm_index == MODREF_STATIC_CHAIN_PARM
			 || a.parm_index == MODREF_GLOBAL_MEMORY_PARM
			 || a.parm_index == MODREF_UNKNOWN_PARM);

    if (!a.useful_p ())
      {
	if (!every_access)
	  {
	    collapse ();
	    return true;
	  }
	return false;
      }

    int ret = modref_access_node::insert (accesses, a, max_accesses,
					  record_adjustments);
    if (ret == -1)
      {
	if (dump_file)
	  fprintf (dump_file,
		   "--param modref-max-accesses limit reached; collapsing\n");
	collapse ();
      }
    return ret != 0;
  }
};

/* Base of an access.  */
template <typename T>
struct GTY((user)) modref_base_node
{
  T base;
  vec <modref_ref_node <T> *, va_gc> *refs;
  bool every_ref;

  modref_base_node (T base):
    base (base),
    refs (NULL),
    every_ref (false) {}

  /* Search REF; return NULL if failed.  */
  modref_ref_node <T> *search (T ref)
  {
    size_t i;
    modref_ref_node <T> *n;
    FOR_EACH_VEC_SAFE_ELT (refs, i, n)
      if (n->ref == ref)
	return n;
    return NULL;
  }

  /* Insert REF; collapse the tree if there are more than MAX_REFS entries.
     Return the inserted ref and if CHANGED is non-null set it to true if
     something changed.  */
  modref_ref_node <T> *insert_ref (T ref, size_t max_refs,
				   bool *changed = NULL)
  {
    modref_ref_node <T> *ref_node;

    /* If the node is collapsed, don't do anything.  */
    if (every_ref)
      return NULL;

    /* Otherwise, insert a node for the ref of the access under the base.  */
    ref_node = search (ref);
    if (ref_node)
      return ref_node;

    /* We always allow inserting ref 0.  For non-0 refs there is an upper
       limit on the number of entries and if it is exceeded,
       drop the ref conservatively to 0.  */
    if (ref && refs && refs->length () >= max_refs)
      {
	if (dump_file)
	  fprintf (dump_file, "--param modref-max-refs limit reached;"
		   " using 0\n");
	ref = 0;
	ref_node = search (ref);
	if (ref_node)
	  return ref_node;
      }

    if (changed)
      *changed = true;

    ref_node = new (ggc_alloc <modref_ref_node <T> > ()) modref_ref_node <T>
								(ref);
    vec_safe_push (refs, ref_node);
    return ref_node;
  }

  void collapse ()
  {
    size_t i;
    modref_ref_node <T> *r;

    if (refs)
      {
	FOR_EACH_VEC_SAFE_ELT (refs, i, r)
	  {
	    r->collapse ();
	    ggc_free (r);
	  }
	vec_free (refs);
      }
    refs = NULL;
    every_ref = true;
  }
};

/* Map translating parameters across a function call.  */

struct modref_parm_map
{
  /* Default constructor.  */
  modref_parm_map ()
  : parm_index (MODREF_UNKNOWN_PARM), parm_offset_known (false), parm_offset ()
  {}

  /* Index of the parameter we translate to.
     Values from the modref_special_parms enum are permitted too.  */
  int parm_index;
  bool parm_offset_known;
  poly_int64 parm_offset;
};

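/* Editorial example (not part of GCC; the call below is hypothetical).  When
   merging the summary of a callee called as bar (q + 4), the PARM_MAP entry
   for the callee's first parameter could be set to

     parm_index = <index of q in the caller>, parm_offset_known = true,
     parm_offset = 4,

   so that accesses expressed relative to the callee's parameter are
   re-expressed relative to the caller's parameter q.  */
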
/* Access tree for a single function.  */
template <typename T>
struct GTY((user)) modref_tree
{
  vec <modref_base_node <T> *, va_gc> *bases;
  bool every_base;

  modref_tree ():
    bases (NULL),
    every_base (false) {}

  /* Insert BASE; collapse the tree if there are more than MAX_BASES entries.
     Return the inserted base and if CHANGED is non-null set it to true if
     something changed.
     If the table gets full, try to insert REF instead.  */

  modref_base_node <T> *insert_base (T base, T ref,
				     unsigned int max_bases,
				     bool *changed = NULL)
  {
    modref_base_node <T> *base_node;

    /* If the node is collapsed, don't do anything.  */
    if (every_base)
      return NULL;

    /* Otherwise, insert a node for the base of the access into the tree.  */
    base_node = search (base);
    if (base_node)
      return base_node;

    /* We always allow inserting base 0.  For a non-0 base there is an upper
       limit on the number of entries and if it is exceeded, drop the base
       conservatively to ref, and if it still does not fit, to 0.  */
    if (base && bases && bases->length () >= max_bases)
      {
	base_node = search (ref);
	if (base_node)
	  {
	    if (dump_file)
	      fprintf (dump_file, "--param modref-max-bases"
		       " limit reached; using ref\n");
	    return base_node;
	  }
	if (dump_file)
	  fprintf (dump_file, "--param modref-max-bases"
		   " limit reached; using 0\n");
	base = 0;
	base_node = search (base);
	if (base_node)
	  return base_node;
      }

    if (changed)
      *changed = true;

    base_node = new (ggc_alloc <modref_base_node <T> > ())
		     modref_base_node <T> (base);
    vec_safe_push (bases, base_node);
    return base_node;
  }

  /* Insert memory access to the tree.
     Return true if something changed.  */
  bool insert (unsigned int max_bases,
	       unsigned int max_refs,
	       unsigned int max_accesses,
	       T base, T ref, modref_access_node a,
	       bool record_adjustments)
  {
    if (every_base)
      return false;

    bool changed = false;

    /* We may end up with max_size being less than size for accesses past the
       end of an array.  Those are undefined and safe to ignore.  */
    if (a.range_info_useful_p ()
	&& known_size_p (a.size) && known_size_p (a.max_size)
	&& known_lt (a.max_size, a.size))
      {
	if (dump_file)
	  fprintf (dump_file,
		   " - Paradoxical range. Ignoring\n");
	return false;
      }
    if (known_size_p (a.size)
	&& known_eq (a.size, 0))
      {
	if (dump_file)
	  fprintf (dump_file,
		   " - Zero size. Ignoring\n");
	return false;
      }
    if (known_size_p (a.max_size)
	&& known_eq (a.max_size, 0))
      {
	if (dump_file)
	  fprintf (dump_file,
		   " - Zero max_size. Ignoring\n");
	return false;
      }
    gcc_checking_assert (!known_size_p (a.max_size)
			 || !known_le (a.max_size, 0));

    /* No useful information tracked; collapse everything.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }

    modref_base_node <T> *base_node
      = insert_base (base, ref, max_bases, &changed);
    base = base_node->base;
    /* If the table got full we may end up with a useless base.  */
    if (!base && !ref && !a.useful_p ())
      {
	collapse ();
	return true;
      }
    if (base_node->every_ref)
      return changed;
    gcc_checking_assert (search (base) != NULL);

    /* No useful ref info tracked; collapse the base.  */
    if (!ref && !a.useful_p ())
      {
	base_node->collapse ();
	return true;
      }

    modref_ref_node <T> *ref_node
      = base_node->insert_ref (ref, max_refs, &changed);
    ref = ref_node->ref;

    if (ref_node->every_access)
      return changed;
    changed |= ref_node->insert_access (a, max_accesses,
					record_adjustments);
    /* See if we failed to add a useful access.  */
    if (ref_node->every_access)
      {
	/* Collapse everything if there is no useful base and ref.  */
	if (!base && !ref)
	  {
	    collapse ();
	    gcc_checking_assert (changed);
	  }
	/* Collapse the base if there is no useful ref.  */
	else if (!ref)
	  {
	    base_node->collapse ();
	    gcc_checking_assert (changed);
	  }
      }
    return changed;
  }

  /* Insert memory access to the tree.
     Return true if something changed.  */
  bool insert (tree fndecl,
	       T base, T ref, const modref_access_node &a,
	       bool record_adjustments)
  {
    return insert (opt_for_fn (fndecl, param_modref_max_bases),
		   opt_for_fn (fndecl, param_modref_max_refs),
		   opt_for_fn (fndecl, param_modref_max_accesses),
		   base, ref, a, record_adjustments);
  }

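  /* Editorial sketch (hypothetical, not part of GCC): recording the store
     from the earlier foo example into foo's store summary could look
     roughly like

       modref_access_node a = {0, 32, 32, 8, 0, true, 0};
       summary->stores->insert (current_function_decl, base_set, ref_set, a,
				false);

     where base_set/ref_set stand for the alias sets (or types when streaming
     for LTO) of the accessed memory, and summary->stores is assumed to be a
     modref_tree<alias_set_type> as used by the ipa-modref pass.  */
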
  /* Remove tree branches that are not useful (i.e. they will always pass).  */

  void cleanup ()
  {
    size_t i, j;
    modref_base_node <T> *base_node;
    modref_ref_node <T> *ref_node;

    if (!bases)
      return;

    for (i = 0; vec_safe_iterate (bases, i, &base_node);)
      {
	if (base_node->refs)
	  for (j = 0; vec_safe_iterate (base_node->refs, j, &ref_node);)
	    {
	      if (!ref_node->every_access
		  && (!ref_node->accesses
		      || !ref_node->accesses->length ()))
		{
		  base_node->refs->unordered_remove (j);
		  vec_free (ref_node->accesses);
		  ggc_delete (ref_node);
		}
	      else
		j++;
	    }
	if (!base_node->every_ref
	    && (!base_node->refs || !base_node->refs->length ()))
	  {
	    bases->unordered_remove (i);
	    vec_free (base_node->refs);
	    ggc_delete (base_node);
	  }
	else
	  i++;
      }
    if (bases && !bases->length ())
      {
	vec_free (bases);
	bases = NULL;
      }
  }

  /* Merge OTHER into the tree.
     PARM_MAP, if non-NULL, maps parm indexes of the callee to the caller.
     Similarly STATIC_CHAIN_MAP, if non-NULL, maps the static chain of the
     callee to the caller.
     Return true if something has changed.  */
  bool merge (unsigned int max_bases,
	      unsigned int max_refs,
	      unsigned int max_accesses,
	      modref_tree <T> *other, vec <modref_parm_map> *parm_map,
	      modref_parm_map *static_chain_map,
	      bool record_accesses,
	      bool promote_unknown_to_global = false)
  {
    if (!other || every_base)
      return false;
    if (other->every_base)
      {
	collapse ();
	return true;
      }

    bool changed = false;
    size_t i, j, k;
    modref_base_node <T> *base_node, *my_base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    bool release = false;

    /* For self-recursive functions we may end up merging a summary into
       itself; produce a copy first so we do not modify the summary under
       our own hands.  */
    if (other == this)
      {
	release = true;
	other = modref_tree<T>::create_ggc ();
	other->copy_from (this);
      }

    FOR_EACH_VEC_SAFE_ELT (other->bases, i, base_node)
      {
	if (base_node->every_ref)
	  {
	    my_base_node = insert_base (base_node->base, 0,
					max_bases, &changed);
	    if (my_base_node && !my_base_node->every_ref)
	      {
		my_base_node->collapse ();
		cleanup ();
		changed = true;
	      }
	  }
	else
	  FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	    {
	      if (ref_node->every_access)
		{
		  changed |= insert (max_bases, max_refs, max_accesses,
				     base_node->base,
				     ref_node->ref,
				     unspecified_modref_access_node,
				     record_accesses);
		}
	      else
		FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
		  {
		    modref_access_node a = *access_node;

		    if (a.parm_index != MODREF_UNKNOWN_PARM
			&& a.parm_index != MODREF_GLOBAL_MEMORY_PARM
			&& parm_map)
		      {
			if (a.parm_index >= (int)parm_map->length ())
			  a.parm_index = MODREF_UNKNOWN_PARM;
			else
			  {
			    modref_parm_map &m
			      = a.parm_index == MODREF_STATIC_CHAIN_PARM
				? *static_chain_map
				: (*parm_map) [a.parm_index];
			    if (m.parm_index == MODREF_LOCAL_MEMORY_PARM)
			      continue;
			    a.parm_offset += m.parm_offset;
			    a.parm_offset_known &= m.parm_offset_known;
			    a.parm_index = m.parm_index;
			  }
		      }
		    if (a.parm_index == MODREF_UNKNOWN_PARM
			&& promote_unknown_to_global)
		      a.parm_index = MODREF_GLOBAL_MEMORY_PARM;
		    changed |= insert (max_bases, max_refs, max_accesses,
				       base_node->base, ref_node->ref,
				       a, record_accesses);
		  }
	    }
      }
    if (release)
      ggc_delete (other);
    return changed;
  }

  /* Merge OTHER into the tree.
     PARM_MAP, if non-NULL, maps parm indexes of the callee to the caller.
     Similarly STATIC_CHAIN_MAP, if non-NULL, maps the static chain of the
     callee to the caller.
     Return true if something has changed.  */
  bool merge (tree fndecl,
	      modref_tree <T> *other, vec <modref_parm_map> *parm_map,
	      modref_parm_map *static_chain_map,
	      bool record_accesses,
	      bool promote_unknown_to_global = false)
  {
    return merge (opt_for_fn (fndecl, param_modref_max_bases),
		  opt_for_fn (fndecl, param_modref_max_refs),
		  opt_for_fn (fndecl, param_modref_max_accesses),
		  other, parm_map, static_chain_map, record_accesses,
		  promote_unknown_to_global);
  }

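  /* Editorial sketch (hypothetical, not part of GCC): when a callee summary
     is propagated into a caller at a call such as bar (q + 4, NULL), one
     modref_parm_map entry per argument would be built (for q + 4 the mapping
     from the earlier modref_parm_map example; for NULL an entry with
     parm_index = MODREF_UNKNOWN_PARM) and the caller's tree updated roughly
     like

       caller_summary->stores->merge (caller_fndecl, callee_summary->stores,
				      &parm_map, &chain_map, true);

     Accesses whose base cannot be expressed in terms of the caller's
     parameters end up as MODREF_UNKNOWN_PARM, or as MODREF_GLOBAL_MEMORY_PARM
     when promote_unknown_to_global is true.  */
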
  /* Copy OTHER to THIS.  */
  void copy_from (modref_tree <T> *other)
  {
    merge (INT_MAX, INT_MAX, INT_MAX, other, NULL, NULL, false);
  }

  /* Search BASE in tree; return NULL if failed.  */
  modref_base_node <T> *search (T base)
  {
    size_t i;
    modref_base_node <T> *n;
    FOR_EACH_VEC_SAFE_ELT (bases, i, n)
      if (n->base == base)
	return n;
    return NULL;
  }

  /* Return true if tree contains access to global memory.  */
  bool global_access_p ()
  {
    size_t i, j, k;
    modref_base_node <T> *base_node;
    modref_ref_node <T> *ref_node;
    modref_access_node *access_node;
    if (every_base)
      return true;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	if (base_node->every_ref)
	  return true;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    if (ref_node->every_access)
	      return true;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index == MODREF_UNKNOWN_PARM
		  || access_node->parm_index == MODREF_GLOBAL_MEMORY_PARM)
		return true;
	  }
      }
    return false;
  }

  /* Return ggc allocated instance.  We explicitly call destructors via
     ggc_delete and do not want finalizers to be registered and
     called at the garbage collection time.  */
  static modref_tree<T> *create_ggc ()
  {
    return new (ggc_alloc_no_dtor<modref_tree<T>> ())
	   modref_tree<T> ();
  }

  /* Remove all records and mark the tree to alias with everything.  */
  void collapse ()
  {
    size_t i;
    modref_base_node <T> *n;

    if (bases)
      {
	FOR_EACH_VEC_SAFE_ELT (bases, i, n)
	  {
	    n->collapse ();
	    ggc_free (n);
	  }
	vec_free (bases);
      }
    bases = NULL;
    every_base = true;
  }

  /* Release memory.  */
  ~modref_tree ()
  {
    collapse ();
  }

  /* Update parameter indexes in the tree according to MAP.  */
  void
  remap_params (vec <int> *map)
  {
    size_t i;
    modref_base_node <T> *base_node;
    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
      {
	size_t j;
	modref_ref_node <T> *ref_node;
	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	  {
	    size_t k;
	    modref_access_node *access_node;
	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	      if (access_node->parm_index >= 0)
		{
		  if (access_node->parm_index < (int)map->length ())
		    access_node->parm_index = (*map)[access_node->parm_index];
		  else
		    access_node->parm_index = MODREF_UNKNOWN_PARM;
		}
	  }
      }
  }
};

void gt_ggc_mx (modref_tree <int>* const&);
void gt_ggc_mx (modref_tree <tree_node*>* const&);
void gt_pch_nx (modref_tree <int>* const&);
void gt_pch_nx (modref_tree <tree_node*>* const&);
void gt_pch_nx (modref_tree <int>* const&, gt_pointer_operator op, void *cookie);
void gt_pch_nx (modref_tree <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

void gt_ggc_mx (modref_base_node <int>*);
void gt_ggc_mx (modref_base_node <tree_node*>* &);
void gt_pch_nx (modref_base_node <int>* const&);
void gt_pch_nx (modref_base_node <tree_node*>* const&);
void gt_pch_nx (modref_base_node <int>* const&, gt_pointer_operator op,
		void *cookie);
void gt_pch_nx (modref_base_node <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

void gt_ggc_mx (modref_ref_node <int>*);
void gt_ggc_mx (modref_ref_node <tree_node*>* &);
void gt_pch_nx (modref_ref_node <int>* const&);
void gt_pch_nx (modref_ref_node <tree_node*>* const&);
void gt_pch_nx (modref_ref_node <int>* const&, gt_pointer_operator op,
		void *cookie);
void gt_pch_nx (modref_ref_node <tree_node*>* const&, gt_pointer_operator op,
		void *cookie);

#endif