]> gcc.gnu.org Git - gcc.git/blob - gcc/value-range-storage.cc
clang warning: warning: private field 'm_gc' is not used [-Wunused-private-field]
[gcc.git] / gcc / value-range-storage.cc
1 /* Support routines for vrange storage.
2 Copyright (C) 2022-2023 Free Software Foundation, Inc.
3 Contributed by Aldy Hernandez <aldyh@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "ssa.h"
28 #include "tree-pretty-print.h"
29 #include "fold-const.h"
30 #include "gimple-range.h"
31 #include "value-range-storage.h"
32
// Generic memory allocator to share one interface between GC and
// obstack allocators.

class vrange_internal_alloc
{
public:
  vrange_internal_alloc () { }
  virtual ~vrange_internal_alloc () { }
  // Return a pointer to SIZE bytes of uninitialized memory.
  virtual void *alloc (size_t size) = 0;
  // Release memory previously returned by alloc, where the
  // underlying allocator supports freeing individual objects.
  virtual void free (void *) = 0;
private:
  // Allocators may own state (e.g. an obstack), so copying one
  // would be a bug.
  DISABLE_COPY_AND_ASSIGN (vrange_internal_alloc);
};
46
47 class vrange_obstack_alloc final: public vrange_internal_alloc
48 {
49 public:
50 vrange_obstack_alloc ()
51 {
52 obstack_init (&m_obstack);
53 }
54 virtual ~vrange_obstack_alloc () final override
55 {
56 obstack_free (&m_obstack, NULL);
57 }
58 virtual void *alloc (size_t size) final override
59 {
60 return obstack_alloc (&m_obstack, size);
61 }
62 virtual void free (void *) final override { }
63 private:
64 obstack m_obstack;
65 };
66
67 class vrange_ggc_alloc final: public vrange_internal_alloc
68 {
69 public:
70 vrange_ggc_alloc () { }
71 virtual ~vrange_ggc_alloc () final override { }
72 virtual void *alloc (size_t size) final override
73 {
74 return ggc_internal_alloc (size);
75 }
76 virtual void free (void *p) final override
77 {
78 return ggc_free (p);
79 }
80 };
81
82 vrange_allocator::vrange_allocator (bool gc)
83 {
84 if (gc)
85 m_alloc = new vrange_ggc_alloc;
86 else
87 m_alloc = new vrange_obstack_alloc;
88 }
89
// Destroy the underlying internal allocator (for obstack-backed
// allocators this releases every range allocated from it).

vrange_allocator::~vrange_allocator ()
{
  delete m_alloc;
}
94
// Allocate SIZE bytes of memory from the underlying allocator.

void *
vrange_allocator::alloc (size_t size)
{
  return m_alloc->alloc (size);
}
100
// Free memory at P previously obtained from this allocator.  A no-op
// for obstack-backed allocators.

void
vrange_allocator::free (void *p)
{
  m_alloc->free (p);
}
106
// Allocate a new vrange_storage object initialized to R and return
// it.  Returns NULL if R is an unsupported range kind (see
// vrange_storage::alloc).

vrange_storage *
vrange_allocator::clone (const vrange &r)
{
  return vrange_storage::alloc (*m_alloc, r);
}
115
116 vrange_storage *
117 vrange_allocator::clone_varying (tree type)
118 {
119 if (irange::supports_p (type))
120 return irange_storage::alloc (*m_alloc, int_range <1> (type));
121 if (frange::supports_p (type))
122 return frange_storage::alloc (*m_alloc, frange (type));
123 return NULL;
124 }
125
126 vrange_storage *
127 vrange_allocator::clone_undefined (tree type)
128 {
129 if (irange::supports_p (type))
130 return irange_storage::alloc (*m_alloc, int_range<1> ());
131 if (frange::supports_p (type))
132 return frange_storage::alloc (*m_alloc, frange ());
133 return NULL;
134 }
135
136 // Allocate a new vrange_storage object initialized to R and return
137 // it. Return NULL if R is unsupported.
138
139 vrange_storage *
140 vrange_storage::alloc (vrange_internal_alloc &allocator, const vrange &r)
141 {
142 if (is_a <irange> (r))
143 return irange_storage::alloc (allocator, as_a <irange> (r));
144 if (is_a <frange> (r))
145 return frange_storage::alloc (allocator, as_a <frange> (r));
146 return NULL;
147 }
148
// Set storage to R.
//
// Dispatches on the dynamic type of R to the matching derived
// storage class.  R must fit in this storage (see fits_p) and must
// be a supported range kind.

void
vrange_storage::set_vrange (const vrange &r)
{
  if (is_a <irange> (r))
    {
      // Assumes this storage object was allocated for an irange;
      // the fits_p assert below catches size mismatches.
      irange_storage *s = static_cast <irange_storage *> (this);
      gcc_checking_assert (s->fits_p (as_a <irange> (r)));
      s->set_irange (as_a <irange> (r));
    }
  else if (is_a <frange> (r))
    {
      frange_storage *s = static_cast <frange_storage *> (this);
      gcc_checking_assert (s->fits_p (as_a <frange> (r)));
      s->set_frange (as_a <frange> (r));
    }
  else
    gcc_unreachable ();
}
169
// Restore R from storage.  TYPE is the type of R.
//
// R's dynamic type selects which derived storage class to read
// from; it is assumed to match what was originally stored.

void
vrange_storage::get_vrange (vrange &r, tree type) const
{
  if (is_a <irange> (r))
    {
      const irange_storage *s = static_cast <const irange_storage *> (this);
      s->get_irange (as_a <irange> (r), type);
    }
  else if (is_a <frange> (r))
    {
      const frange_storage *s = static_cast <const frange_storage *> (this);
      s->get_frange (as_a <frange> (r), type);
    }
  else
    gcc_unreachable ();
}
188
// Return TRUE if storage can fit R.

bool
vrange_storage::fits_p (const vrange &r) const
{
  if (is_a <irange> (r))
    {
      const irange_storage *s = static_cast <const irange_storage *> (this);
      return s->fits_p (as_a <irange> (r));
    }
  if (is_a <frange> (r))
    {
      const frange_storage *s = static_cast <const frange_storage *> (this);
      return s->fits_p (as_a <frange> (r));
    }
  gcc_unreachable ();
  // Not reached; pacifies compilers that cannot see that
  // gcc_unreachable does not return.
  return false;
}
207
// Return TRUE if the range in storage is equal to R.  TYPE is the
// type of the stored range.

bool
vrange_storage::equal_p (const vrange &r, tree type) const
{
  if (is_a <irange> (r))
    {
      const irange_storage *s = static_cast <const irange_storage *> (this);
      return s->equal_p (as_a <irange> (r), type);
    }
  if (is_a <frange> (r))
    {
      const frange_storage *s = static_cast <const frange_storage *> (this);
      return s->equal_p (as_a <frange> (r), type);
    }
  gcc_unreachable ();
}
225
226 //============================================================================
227 // irange_storage implementation
228 //============================================================================
229
// Return a writable pointer to the lengths section of the storage.
//
// Layout: m_val holds the HOST_WIDE_INTs for every sub-range bound
// plus the nonzero mask (m_num_ranges * 2 + 1 wide_ints, each
// reserving up to WIDE_INT_MAX_HWIS (m_precision) HWIs), immediately
// followed by one length byte per wide_int.

unsigned char *
irange_storage::write_lengths_address ()
{
  return (unsigned char *)&m_val[(m_num_ranges * 2 + 1)
				 * WIDE_INT_MAX_HWIS (m_precision)];
}
236
// Return a read-only pointer to the lengths section of the storage.

const unsigned char *
irange_storage::lengths_address () const
{
  // Safe const_cast: the writable variant only computes an address.
  return const_cast <irange_storage *> (this)->write_lengths_address ();
}
242
243 // Allocate a new irange_storage object initialized to R.
244
245 irange_storage *
246 irange_storage::alloc (vrange_internal_alloc &allocator, const irange &r)
247 {
248 size_t size = irange_storage::size (r);
249 irange_storage *p = static_cast <irange_storage *> (allocator.alloc (size));
250 new (p) irange_storage (r);
251 return p;
252 }
253
// Initialize the storage with R.
//
// R's number of sub-ranges at construction time also fixes the
// maximum number of sub-ranges this storage can ever hold.

irange_storage::irange_storage (const irange &r)
  : m_max_ranges (r.num_pairs ())
{
  m_num_ranges = m_max_ranges;
  set_irange (r);
}
262
263 static inline void
264 write_wide_int (HOST_WIDE_INT *&val, unsigned char *&len, const wide_int &w)
265 {
266 *len = w.get_len ();
267 for (unsigned i = 0; i < *len; ++i)
268 *val++ = w.elt (i);
269 ++len;
270 }
271
// Store R into the current storage.

void
irange_storage::set_irange (const irange &r)
{
  gcc_checking_assert (fits_p (r));

  // UNDEFINED and VARYING are fully described by the kind alone; no
  // endpoints need to be written.
  if (r.undefined_p ())
    {
      m_kind = VR_UNDEFINED;
      return;
    }
  if (r.varying_p ())
    {
      m_kind = VR_VARYING;
      return;
    }

  m_precision = TYPE_PRECISION (r.type ());
  m_num_ranges = r.num_pairs ();
  m_kind = VR_RANGE;

  // Stream each sub-range's bounds, then the nonzero mask, keeping
  // the parallel length bytes in sync (write_wide_int bumps both
  // cursors).
  HOST_WIDE_INT *val = &m_val[0];
  unsigned char *len = write_lengths_address ();

  for (unsigned i = 0; i < r.num_pairs (); ++i)
    {
      write_wide_int (val, len, r.lower_bound (i));
      write_wide_int (val, len, r.upper_bound (i));
    }
  write_wide_int (val, len, r.m_nonzero_mask);

  // With checking enabled, verify the round trip: reading the range
  // back must yield exactly what was stored.
  if (flag_checking)
    {
      int_range_max tmp;
      get_irange (tmp, r.type ());
      gcc_checking_assert (tmp == r);
    }
}
311
// Deserialize into W a wide_int of precision PREC stored as LEN
// HOST_WIDE_INTs starting at VAL.
//
// The stored HWI buffer is viewed as a trailing_wide_int, which
// avoids copying the individual HWIs by hand.

static inline void
read_wide_int (wide_int &w,
	       const HOST_WIDE_INT *val, unsigned char len, unsigned prec)
{
  trailing_wide_int_storage stow (prec, &len,
				  const_cast <HOST_WIDE_INT *> (val));
  w = trailing_wide_int (stow);
}
320
// Restore a range of TYPE from storage into R.

void
irange_storage::get_irange (irange &r, tree type) const
{
  // UNDEFINED and VARYING carry no endpoints in storage.
  if (m_kind == VR_UNDEFINED)
    {
      r.set_undefined ();
      return;
    }
  if (m_kind == VR_VARYING)
    {
      r.set_varying (type);
      return;
    }

  gcc_checking_assert (TYPE_PRECISION (type) == m_precision);
  const HOST_WIDE_INT *val = &m_val[0];
  const unsigned char *len = lengths_address ();

  // Handle the common case where R can fit the new range.
  if (r.m_max_ranges >= m_num_ranges)
    {
      // Poke the bounds directly into R's internal representation,
      // advancing VAL by each wide_int's stored length.
      r.m_kind = VR_RANGE;
      r.m_num_ranges = m_num_ranges;
      r.m_type = type;
      for (unsigned i = 0; i < m_num_ranges * 2; ++i)
	{
	  read_wide_int (r.m_base[i], val, *len, m_precision);
	  val += *len++;
	}
    }
  // Otherwise build the range piecewise.
  else
    {
      r.set_undefined ();
      for (unsigned i = 0; i < m_num_ranges; ++i)
	{
	  wide_int lb, ub;
	  read_wide_int (lb, val, *len, m_precision);
	  val += *len++;
	  read_wide_int (ub, val, *len, m_precision);
	  val += *len++;
	  int_range<1> tmp (type, lb, ub);
	  r.union_ (tmp);
	}
    }
  // The nonzero mask is stored immediately after the sub-ranges.
  read_wide_int (r.m_nonzero_mask, val, *len, m_precision);
  // NOTE(review): restoring the bounds can apparently leave R's kind
  // as VR_VARYING (e.g. when the pairs cover the whole domain);
  // normalize it back to VR_RANGE — presumably so the nonzero mask
  // just read is not considered meaningless.  Confirm against irange
  // internals.
  if (r.m_kind == VR_VARYING)
    r.m_kind = VR_RANGE;

  if (flag_checking)
    r.verify_range ();
}
375
// Return TRUE if the range in storage is equal to R, given that the
// stored range is of type TYPE.

bool
irange_storage::equal_p (const irange &r, tree type) const
{
  // UNDEFINED compares equal regardless of type; VARYING requires
  // compatible types as well.
  if (m_kind == VR_UNDEFINED || r.undefined_p ())
    return m_kind == r.m_kind;
  if (m_kind == VR_VARYING || r.varying_p ())
    return m_kind == r.m_kind && types_compatible_p (r.type (), type);

  tree rtype = r.type ();
  if (!types_compatible_p (rtype, type))
    return false;

  // ?? We could make this faster by doing the comparison in place,
  // without going through get_irange.
  int_range_max tmp;
  get_irange (tmp, rtype);
  return tmp == r;
}
394
// Return the size in bytes to allocate storage that can hold R.

size_t
irange_storage::size (const irange &r)
{
  // An undefined range stores no endpoints at all.
  if (r.undefined_p ())
    return sizeof (irange_storage);

  unsigned prec = TYPE_PRECISION (r.type ());
  // One wide_int per bound of each pair, plus one for the nonzero
  // mask.
  unsigned n = r.num_pairs () * 2 + 1;
  // The "- 1" presumably accounts for one HWI already included in
  // sizeof (irange_storage) through the trailing m_val array -- see
  // the class definition in value-range-storage.h to confirm.
  unsigned hwi_size = ((n * WIDE_INT_MAX_HWIS (prec) - 1)
		       * sizeof (HOST_WIDE_INT));
  // One length byte per wide_int.
  unsigned len_size = n;
  return sizeof (irange_storage) + hwi_size + len_size;
}
410
411 // Return TRUE if R fits in the current storage.
412
413 bool
414 irange_storage::fits_p (const irange &r) const
415 {
416 return m_max_ranges >= r.num_pairs ();
417 }
418
// Dump the contents of the storage to stderr, for debugging.

void
irange_storage::dump () const
{
  fprintf (stderr, "irange_storage (prec=%d, ranges=%d):\n",
	   m_precision, m_num_ranges);

  if (m_num_ranges == 0)
    return;

  const HOST_WIDE_INT *val = &m_val[0];
  const unsigned char *len = lengths_address ();
  int i, j;

  // One length byte per bound of each pair, plus one for the
  // nonzero mask.
  fprintf (stderr, " lengths = [ ");
  for (i = 0; i < m_num_ranges * 2 + 1; ++i)
    fprintf (stderr, "%d ", len[i]);
  fprintf (stderr, "]\n");

  // Walk VAL and LEN in lock step, exactly as set_irange wrote them.
  for (i = 0; i < m_num_ranges; ++i)
    {
      for (j = 0; j < *len; ++j)
	fprintf (stderr, " [PAIR %d] LB " HOST_WIDE_INT_PRINT_DEC "\n", i,
		 *val++);
      ++len;
      for (j = 0; j < *len; ++j)
	fprintf (stderr, " [PAIR %d] UB " HOST_WIDE_INT_PRINT_DEC "\n", i,
		 *val++);
      ++len;
    }
  // Finally the nonzero mask.
  for (j = 0; j < *len; ++j)
    fprintf (stderr, " [NZ] " HOST_WIDE_INT_PRINT_DEC "\n", *val++);
}
451
// Dump STORAGE to stderr, for calling from the debugger.

DEBUG_FUNCTION void
debug (const irange_storage &storage)
{
  storage.dump ();
  fprintf (stderr, "\n");
}
458
459 //============================================================================
460 // frange_storage implementation
461 //============================================================================
462
463 // Allocate a new frange_storage object initialized to R.
464
465 frange_storage *
466 frange_storage::alloc (vrange_internal_alloc &allocator, const frange &r)
467 {
468 size_t size = sizeof (frange_storage);
469 frange_storage *p = static_cast <frange_storage *> (allocator.alloc (size));
470 new (p) frange_storage (r);
471 return p;
472 }
473
// Store R into the current storage.

void
frange_storage::set_frange (const frange &r)
{
  gcc_checking_assert (fits_p (r));

  // franges are small enough that each field is simply mirrored.
  m_kind = r.m_kind;
  m_min = r.m_min;
  m_max = r.m_max;
  m_pos_nan = r.m_pos_nan;
  m_neg_nan = r.m_neg_nan;
}
485
// Restore a range of TYPE from storage into R.

void
frange_storage::get_frange (frange &r, tree type) const
{
  gcc_checking_assert (r.supports_type_p (type));

  // Handle explicit NANs.
  if (m_kind == VR_NAN)
    {
      if (HONOR_NANS (type))
	{
	  // Sign of the NAN is preserved when only one of the two
	  // sign bits is set.
	  if (m_pos_nan && m_neg_nan)
	    r.set_nan (type);
	  else
	    r.set_nan (type, m_neg_nan);
	}
      else
	// A NAN cannot occur when NANs are not honored, so the
	// restored range is empty.
	r.set_undefined ();
      return;
    }
  if (m_kind == VR_UNDEFINED)
    {
      r.set_undefined ();
      return;
    }

  // We use the constructor to create the new range instead of writing
  // out the bits into the frange directly, because the global range
  // being read may be being inlined into a function with different
  // restrictions as when it was originally written.  We want to make
  // sure the resulting range is canonicalized correctly for the new
  // consumer.
  r = frange (type, m_min, m_max, m_kind);

  // The constructor will set the NAN bits for HONOR_NANS, but we must
  // make sure to set the NAN sign if known.
  if (HONOR_NANS (type) && (m_pos_nan ^ m_neg_nan) == 1)
    r.update_nan (m_neg_nan);
  else if (!m_pos_nan && !m_neg_nan)
    r.clear_nan ();
}
526
527 bool
528 frange_storage::equal_p (const frange &r, tree type) const
529 {
530 if (r.undefined_p ())
531 return m_kind == VR_UNDEFINED;
532
533 tree rtype = type;
534 if (!types_compatible_p (rtype, type))
535 return false;
536
537 frange tmp;
538 get_frange (tmp, rtype);
539 return tmp == r;
540 }
541
// Return TRUE if the storage can fit R.  frange storage is fixed
// size, so any frange fits.

bool
frange_storage::fits_p (const frange &) const
{
  return true;
}
547
// File-local GC-backed allocator shared by the
// ggc_alloc_vrange_storage entry points.
static vrange_allocator ggc_vrange_allocator (true);
549
// Return a GC-allocated storage object holding a VARYING range of
// TYPE, or NULL if TYPE is unsupported.

vrange_storage *ggc_alloc_vrange_storage (tree type)
{
  return ggc_vrange_allocator.clone_varying (type);
}
554
// Return a GC-allocated storage object initialized to R, or NULL if
// R is an unsupported range kind.

vrange_storage *ggc_alloc_vrange_storage (const vrange &r)
{
  return ggc_vrange_allocator.clone (r);
}
This page took 0.062185 seconds and 5 git commands to generate.