1 /* Operations with affine combinations of trees.
2 Copyright (C) 2005, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "hard-reg-set.h"
28 #include "output.h"
29 #include "diagnostic.h"
30 #include "tree-dump.h"
31 #include "pointer-set.h"
32 #include "tree-affine.h"
33 #include "tree-gimple.h"
34
35 /* Extends CST as appropriate for the affine combinations COMB. */
36
37 double_int
38 double_int_ext_for_comb (double_int cst, aff_tree *comb)
39 {
40 return double_int_sext (cst, TYPE_PRECISION (comb->type));
41 }
42
43 /* Initializes affine combination COMB so that its value is zero in TYPE. */
44
45 static void
46 aff_combination_zero (aff_tree *comb, tree type)
47 {
48 comb->type = type;
49 comb->offset = double_int_zero;
50 comb->n = 0;
51 comb->rest = NULL_TREE;
52 }
53
54 /* Sets COMB to CST. */
55
56 void
57 aff_combination_const (aff_tree *comb, tree type, double_int cst)
58 {
59 aff_combination_zero (comb, type);
60 comb->offset = double_int_ext_for_comb (cst, comb);
61 }
62
63 /* Sets COMB to single element ELT. */
64
65 void
66 aff_combination_elt (aff_tree *comb, tree type, tree elt)
67 {
68 aff_combination_zero (comb, type);
69
70 comb->n = 1;
71 comb->elts[0].val = elt;
72 comb->elts[0].coef = double_int_one;
73 }
74
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, double_int scale)
{
  unsigned i, j;

  /* Reduce SCALE to the precision of the combination first; scaling by
     one is a no-op, scaling by zero yields the zero combination.  */
  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_one_p (scale))
    return;

  if (double_int_zero_p (scale))
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset
    = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
  /* Scale every coefficient, compacting the element array in place
     (I reads, J writes).  */
  for (i = 0, j = 0; i < comb->n; i++)
    {
      double_int new_coef;

      new_coef
	= double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
				   comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      /* Arithmetic on the overflow tree of a pointer combination is done
	 in sizetype.  */
      if (POINTER_TYPE_P (type))
	type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
	{
	  /* A slot was freed above; move REST into the element array with
	     coefficient SCALE instead of building a MULT_EXPR.  */
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  double_int_to_tree (type, scale));
    }
}
128
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{
  unsigned i;
  tree type;

  scale = double_int_ext_for_comb (scale, comb);
  if (double_int_zero_p (scale))
    return;

  /* If ELT already occurs in COMB, just merge the coefficients.  */
  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	double_int new_coef;

	new_coef = double_int_add (comb->elts[i].coef, scale);
	new_coef = double_int_ext_for_comb (new_coef, comb);
	if (!double_int_zero_p (new_coef))
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	/* The coefficients cancelled; remove the element by moving the
	   last element into its slot.  */
	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	/* A slot is now free, so the overflow tree REST (which has
	   implicit coefficient one) can move back into the array.  */
	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = double_int_one;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  /* The element array is full; fold ELT * SCALE into the REST tree.
     Pointer combinations do this arithmetic in sizetype.  */
  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (double_int_one_p (scale))
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       double_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
192
193 /* Adds CST to C. */
194
195 static void
196 aff_combination_add_cst (aff_tree *c, double_int cst)
197 {
198 c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
199 }
200
201 /* Adds COMB2 to COMB1. */
202
203 void
204 aff_combination_add (aff_tree *comb1, aff_tree *comb2)
205 {
206 unsigned i;
207
208 aff_combination_add_cst (comb1, comb2->offset);
209 for (i = 0; i < comb2->n; i++)
210 aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
211 if (comb2->rest)
212 aff_combination_add_elt (comb1, comb2->rest, double_int_one);
213 }
214
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  /* Widening cannot be done elementwise (the wider result carries more
     information than the narrow combination), so rebuild the combination
     from a converted tree.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  /* Equal precision: only the recorded type needed updating.  */
  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  /* Narrowing: re-extend the offset and coefficients to the new
     precision, dropping elements whose coefficients become zero
     (compacting the array in place, I reads, J writes).  */
  comb->offset = double_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
      if (double_int_zero_p (new_coef))
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      /* A slot was freed; move the overflow tree REST back into the
	 element array with coefficient one.  */
      comb->elts[comb->n].coef = double_int_one;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
257
/* Splits EXPR into an affine combination of parts, computed in TYPE.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  enum machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, tree_to_double_int (expr));
      return;

    case POINTER_PLUS_EXPR:
      /* The offset operand of POINTER_PLUS_EXPR is decomposed in
	 sizetype.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* A - B is handled as A + (-1) * B.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, double_int_minus_one);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplication by a constant keeps the result affine.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, tree_to_double_int (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, double_int_minus_one);
      aff_combination_add_cst (comb, double_int_minus_one);
      return;

    case ADDR_EXPR:
      /* Decompose &REF as &CORE + TOFFSET + BITPOS / BITS_PER_UNIT.  */
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &volatilep,
				  false);
      /* Addresses within a bit-field cannot be expressed in bytes.  */
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type,
			     uhwi_to_double_int (bitpos / BITS_PER_UNIT));
      core = build_fold_addr_expr (core);
      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, double_int_one);
      else
	{
	  /* build_fold_addr_expr may have folded the address into
	     something decomposable; recurse on the result.  */
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    default:
      break;
    }

  /* EXPR is not further decomposable; record it as a single element with
     coefficient one.  */
  aff_combination_elt (comb, type, expr);
}
343
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  EXPR may be NULL, in which case only ELT * SCALE
   is returned.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
		 aff_tree *comb)
{
  enum tree_code code;
  tree type1 = type;
  /* Offset arithmetic for pointer types is performed in sizetype.  */
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  scale = double_int_ext_for_comb (scale, comb);
  elt = fold_convert (type1, elt);

  /* SCALE == 1: no multiplication needed, just add ELT.  */
  if (double_int_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, elt);

      if (POINTER_TYPE_P (type))
	return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  /* SCALE == -1: use negation/subtraction instead of multiplication.  */
  if (double_int_minus_one_p (scale))
    {
      if (!expr)
	return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));

      if (POINTER_TYPE_P (type))
	{
	  /* POINTER_PLUS_EXPR has no MINUS counterpart; negate the
	     offset instead.  */
	  elt = fold_build1 (NEGATE_EXPR, type1, elt);
	  return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
	}
      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_convert (type,
			 fold_build2 (MULT_EXPR, type1, elt,
				      double_int_to_tree (type1, scale)));

  /* Prefer EXPR - ELT * |SCALE| over EXPR + ELT * SCALE when SCALE is
     negative, for a more readable expression.  */
  if (double_int_negative_p (scale))
    {
      code = MINUS_EXPR;
      scale = double_int_neg (scale);
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
		     double_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (type))
    {
      /* As above: express subtraction through a negated offset.  */
      if (code == MINUS_EXPR)
	elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
    }
  return fold_build2 (code, type, expr, elt);
}
405
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = comb->rest;
  unsigned i;
  double_int off, sgn;
  tree type1 = type;
  /* Offsets of pointer combinations are built in sizetype.  */
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  /* The overflow tree REST may only be non-NULL when the element array
     is full.  */
  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
			    comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (double_int_negative_p (comb->offset))
    {
      off = double_int_neg (comb->offset);
      sgn = double_int_minus_one;
    }
  else
    {
      off = comb->offset;
      sgn = double_int_one;
    }
  return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
			  comb);
}
440
441 /* Copies the tree elements of COMB to ensure that they are not shared. */
442
443 void
444 unshare_aff_combination (aff_tree *comb)
445 {
446 unsigned i;
447
448 for (i = 0; i < comb->n; i++)
449 comb->elts[i].val = unshare_expr (comb->elts[i].val);
450 if (comb->rest)
451 comb->rest = unshare_expr (comb->rest);
452 }
453
454 /* Remove M-th element from COMB. */
455
456 void
457 aff_combination_remove_elt (aff_tree *comb, unsigned m)
458 {
459 comb->n--;
460 if (m <= comb->n)
461 comb->elts[m] = comb->elts[comb->n];
462 if (comb->rest)
463 {
464 comb->elts[comb->n].coef = double_int_one;
465 comb->elts[comb->n].val = comb->rest;
466 comb->rest = NULL_TREE;
467 comb->n++;
468 }
469 }
470
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */


static void
aff_combination_add_product (aff_tree *c, double_int coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  /* Distribute over the elements: each contributes
     (COEF * C->elts[i].coef) * (C->elts[i].val * VAL).  */
  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval,
			       double_int_mul (coef, c->elts[i].coef));
    }

  /* The overflow tree REST has implicit coefficient one.  */
  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  /* The constant offset of C scales VAL (or just adds in as a constant
     when VAL is NULL).  */
  if (val)
    aff_combination_add_elt (r, val,
			     double_int_mul (coef, c->offset));
  else
    aff_combination_add_cst (r, double_int_mul (coef, c->offset));
}
515
516 /* Multiplies C1 by C2, storing the result to R */
517
518 void
519 aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
520 {
521 unsigned i;
522 gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));
523
524 aff_combination_zero (r, c1->type);
525
526 for (i = 0; i < c2->n; i++)
527 aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
528 if (c2->rest)
529 aff_combination_add_product (c1, double_int_one, c2->rest, r);
530 aff_combination_add_product (c1, c2->offset, NULL, r);
531 }
532
533 /* Returns the element of COMB whose value is VAL, or NULL if no such
534 element exists. If IDX is not NULL, it is set to the index of VAL in
535 COMB. */
536
537 static struct aff_comb_elt *
538 aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
539 {
540 unsigned i;
541
542 for (i = 0; i < comb->n; i++)
543 if (operand_equal_p (comb->elts[i].val, val, 0))
544 {
545 if (idx)
546 *idx = i;
547
548 return &comb->elts[i];
549 }
550
551 return NULL;
552 }
553
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  /* The cached affine expansion of the name.  */
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};
564
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb, struct pointer_map_t **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, def, rhs;
  double_int scale;
  void **slot;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      e = comb->elts[i].val;
      if (TREE_CODE (e) != SSA_NAME)
	continue;
      /* Only expand names defined by a plain assignment to E.  */
      def = SSA_NAME_DEF_STMT (e);
      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
	  || GIMPLE_STMT_OPERAND (def, 0) != e)
	continue;

      rhs = GIMPLE_STMT_OPERAND (def, 1);
      if (TREE_CODE (rhs) != SSA_NAME
	  && !EXPR_P (rhs)
	  && !is_gimple_min_invariant (rhs))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (REFERENCE_CLASS_P (rhs))
	continue;

      if (!*cache)
	*cache = pointer_map_create ();
      slot = pointer_map_insert (*cache, e);
      exp = *slot;

      if (!exp)
	{
	  /* Mark the cache entry in-progress before recursing, so that a
	     cycle through non-PHI definitions trips the assert below
	     instead of recursing forever.  */
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, double_int_neg (scale));
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
634
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				struct pointer_map_t **cache)
{
  /* First decompose EXPR syntactically, then substitute the definitions
     of the SSA names occurring in the result.  */
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
652
653 /* Frees memory occupied by struct name_expansion in *VALUE. Callback for
654 pointer_map_traverse. */
655
656 static bool
657 free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
658 void *data ATTRIBUTE_UNUSED)
659 {
660 struct name_expansion *exp = *value;
661
662 free (exp);
663 return true;
664 }
665
666 /* Frees memory allocated for the CACHE used by
667 tree_to_aff_combination_expand. */
668
669 void
670 free_affine_expand_cache (struct pointer_map_t **cache)
671 {
672 if (!*cache)
673 return;
674
675 pointer_map_traverse (*cache, free_name_expansion, NULL);
676 pointer_map_destroy (*cache);
677 *cache = NULL;
678 }
679
680 /* If VAL != CST * DIV for any constant CST, returns false.
681 Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true,
682 additionally compares CST and MULT, and if they are different,
683 returns false. Finally, if neither of these two cases occur,
684 true is returned, and if CST != 0, CST is stored to MULT and
685 MULT_SET is set to true. */
686
687 static bool
688 double_int_constant_multiple_p (double_int val, double_int div,
689 bool *mult_set, double_int *mult)
690 {
691 double_int rem, cst;
692
693 if (double_int_zero_p (val))
694 return true;
695
696 if (double_int_zero_p (div))
697 return false;
698
699 cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
700 if (!double_int_zero_p (rem))
701 return false;
702
703 if (*mult_set && !double_int_equal_p (*mult, cst))
704 return false;
705
706 *mult_set = true;
707 *mult = cst;
708 return true;
709 }
710
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     double_int *mult)
{
  bool mult_set = false;
  unsigned i;

  /* A zero VAL is a multiple of anything, with X == 0.  */
  if (val->n == 0 && double_int_zero_p (val->offset))
    {
      *mult = double_int_zero;
      return true;
    }
  /* The combinations must consist of the same number of elements, and
     combinations with an overflow tree are not comparable.  */
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  /* One and the same X must relate the offsets and every pair of
     matching coefficients; MULT_SET tracks whether X is constrained
     yet.  */
  if (!double_int_constant_multiple_p (val->offset, div->offset,
				       &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					   &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
750
751 /* Prints the affine VAL to the FILE. */
752
753 void
754 print_aff (FILE *file, aff_tree *val)
755 {
756 unsigned i;
757 bool uns = TYPE_UNSIGNED (val->type);
758 if (POINTER_TYPE_P (val->type))
759 uns = false;
760 fprintf (file, "{\n type = ");
761 print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
762 fprintf (file, "\n offset = ");
763 dump_double_int (file, val->offset, uns);
764 if (val->n > 0)
765 {
766 fprintf (file, "\n elements = {\n");
767 for (i = 0; i < val->n; i++)
768 {
769 fprintf (file, " [%d] = ", i);
770 print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);
771
772 fprintf (file, " * ");
773 dump_double_int (file, val->elts[i].coef, uns);
774 if (i != val->n - 1)
775 fprintf (file, ", \n");
776 }
777 fprintf (file, "\n }");
778 }
779 if (val->rest)
780 {
781 fprintf (file, "\n rest = ");
782 print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
783 }
784 fprintf (file, "\n}");
785 }
786
/* Prints the affine VAL to the standard error, used for debugging.  */

void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
795
/* Returns address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  */

void
get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  enum machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype,
			 shwi_to_double_int (bitpos / BITS_PER_UNIT));
  aff_combination_add (addr, &tmp);

  /* Round the size in bits up to whole bytes.  */
  *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
}
827