1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2015, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "hash-set.h"
31 #include "machmode.h"
32 #include "vec.h"
33 #include "double-int.h"
34 #include "input.h"
35 #include "alias.h"
36 #include "symtab.h"
37 #include "wide-int.h"
38 #include "inchash.h"
39 #include "tree.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "stringpool.h"
43 #include "varasm.h"
44 #include "flags.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "tree-inline.h"
48
49 #include "ada.h"
50 #include "types.h"
51 #include "atree.h"
52 #include "elists.h"
53 #include "namet.h"
54 #include "nlists.h"
55 #include "snames.h"
56 #include "stringt.h"
57 #include "uintp.h"
58 #include "fe.h"
59 #include "sinfo.h"
60 #include "einfo.h"
61 #include "ada-tree.h"
62 #include "gigi.h"
63
64 /* Return the base type of TYPE. */
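/* For instance (purely illustrative), for an Ada declaration such as
   subtype Small is Integer range 1 .. 10, gigi builds an INTEGER_TYPE
   whose TREE_TYPE links back to the base type Integer, which is what
   the loop below walks up to. */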
65
66 tree
67 get_base_type (tree type)
68 {
69 if (TREE_CODE (type) == RECORD_TYPE
70 && TYPE_JUSTIFIED_MODULAR_P (type))
71 type = TREE_TYPE (TYPE_FIELDS (type));
72
73 while (TREE_TYPE (type)
74 && (TREE_CODE (type) == INTEGER_TYPE
75 || TREE_CODE (type) == REAL_TYPE))
76 type = TREE_TYPE (type);
77
78 return type;
79 }
80 \f
81 /* EXP is a GCC tree representing an address. See if we can find how
82 strictly the object at that address is aligned. Return that alignment
83 in bits. If we don't know anything about the alignment, return 0. */
84
85 unsigned int
86 known_alignment (tree exp)
87 {
88 unsigned int this_alignment;
89 unsigned int lhs, rhs;
90
91 switch (TREE_CODE (exp))
92 {
93 CASE_CONVERT:
94 case VIEW_CONVERT_EXPR:
95 case NON_LVALUE_EXPR:
96 /* Conversions between pointers and integers don't change the alignment
97 of the underlying object. */
98 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
99 break;
100
101 case COMPOUND_EXPR:
102         /* The value of a COMPOUND_EXPR is that of its second operand.  */
103 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
104 break;
105
106 case PLUS_EXPR:
107 case MINUS_EXPR:
108       /* If two addresses are added, the alignment of the result is the
109 minimum of the two alignments. */
110 lhs = known_alignment (TREE_OPERAND (exp, 0));
111 rhs = known_alignment (TREE_OPERAND (exp, 1));
112 this_alignment = MIN (lhs, rhs);
113 break;
114
115 case POINTER_PLUS_EXPR:
116 lhs = known_alignment (TREE_OPERAND (exp, 0));
117 rhs = known_alignment (TREE_OPERAND (exp, 1));
118 /* If we don't know the alignment of the offset, we assume that
119 of the base. */
120 if (rhs == 0)
121 this_alignment = lhs;
122 else
123 this_alignment = MIN (lhs, rhs);
124 break;
125
126 case COND_EXPR:
127 /* If there is a choice between two values, use the smallest one. */
128 lhs = known_alignment (TREE_OPERAND (exp, 1));
129 rhs = known_alignment (TREE_OPERAND (exp, 2));
130 this_alignment = MIN (lhs, rhs);
131 break;
132
133 case INTEGER_CST:
134 {
135 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
136 /* The first part of this represents the lowest bit in the constant,
137 but it is originally in bytes, not bits. */
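/* For instance, a constant address of 24 = 0b11000 yields C & -C = 8,
   so the object is known to be 8-byte, i.e. 64-bit, aligned. */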
138 this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
139 }
140 break;
141
142 case MULT_EXPR:
143 /* If we know the alignment of just one side, use it. Otherwise,
144 use the product of the alignments. */
145 lhs = known_alignment (TREE_OPERAND (exp, 0));
146 rhs = known_alignment (TREE_OPERAND (exp, 1));
147
148 if (lhs == 0)
149 this_alignment = rhs;
150 else if (rhs == 0)
151 this_alignment = lhs;
152 else
153 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
154 break;
155
156 case BIT_AND_EXPR:
157 /* A bit-and expression is as aligned as the maximum alignment of the
158 operands. We typically get here for a complex lhs and a constant
159 negative power of two on the rhs to force an explicit alignment, so
160 don't bother looking at the lhs. */
161 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
162 break;
163
164 case ADDR_EXPR:
165 this_alignment = expr_align (TREE_OPERAND (exp, 0));
166 break;
167
168 case CALL_EXPR:
169 {
170 tree t = maybe_inline_call_in_expr (exp);
171 if (t)
172 return known_alignment (t);
173 }
174
175 /* Fall through... */
176
177 default:
178 /* For other pointer expressions, we assume that the pointed-to object
179 is at least as aligned as the pointed-to type. Beware that we can
180 have a dummy type here (e.g. a Taft Amendment type), for which the
181 alignment is meaningless and should be ignored. */
182 if (POINTER_TYPE_P (TREE_TYPE (exp))
183 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
184 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
185 else
186 this_alignment = 0;
187 break;
188 }
189
190 return this_alignment;
191 }
192 \f
193 /* We have a comparison or assignment operation on two types, T1 and T2, which
194 are either both array types or both record types. T1 is assumed to be for
195 the left hand side operand, and T2 for the right hand side. Return the
196 type that both operands should be converted to for the operation, if any.
197 Otherwise return zero. */
198
199 static tree
200 find_common_type (tree t1, tree t2)
201 {
202   /* ??? As of today, various constructs lead here with types of different
203      sizes even when both are constant (e.g. tagged types, packable vs regular
204 component types, padded vs unpadded types, ...). While some of these
205 would better be handled upstream (types should be made consistent before
206 calling into build_binary_op), some others are really expected and we
207 have to be careful. */
208
209   /* We must avoid writing more than what the target can hold if this is for
210      an assignment, and the case of tagged types is handled in build_binary_op,
211      so we use the lhs type if it is known to be smaller, or of constant size
212      while the rhs type is not, whatever the modes.  We also force t1 in case of
213 constant size equality to minimize occurrences of view conversions on the
214 lhs of an assignment, except for the case of record types with a variant
215 part on the lhs but not on the rhs to make the conversion simpler. */
216 if (TREE_CONSTANT (TYPE_SIZE (t1))
217 && (!TREE_CONSTANT (TYPE_SIZE (t2))
218 || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
219 || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
220 && !(TREE_CODE (t1) == RECORD_TYPE
221 && TREE_CODE (t2) == RECORD_TYPE
222 && get_variant_part (t1) != NULL_TREE
223 && get_variant_part (t2) == NULL_TREE))))
224 return t1;
225
226 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
227 that we will not have any alignment problems since, if we did, the
228 non-BLKmode type could not have been used. */
229 if (TYPE_MODE (t1) != BLKmode)
230 return t1;
231
232 /* If the rhs type is of constant size, use it whatever the modes. At
233 this point it is known to be smaller, or of constant size and the
234 lhs type is not. */
235 if (TREE_CONSTANT (TYPE_SIZE (t2)))
236 return t2;
237
238 /* Otherwise, if the rhs type is non-BLKmode, use it. */
239 if (TYPE_MODE (t2) != BLKmode)
240 return t2;
241
242 /* In this case, both types have variable size and BLKmode. It's
243 probably best to leave the "type mismatch" because changing it
244 could cause a bad self-referential reference. */
245 return NULL_TREE;
246 }
247 \f
248 /* Return an expression tree representing an equality comparison of A1 and A2,
249 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
250
251 Two arrays are equal in one of two ways: (1) if both have zero length in
252 some dimension (not necessarily the same dimension) or (2) if the lengths
253 in each dimension are equal and the data is equal. We perform the length
254 tests in as efficient a manner as possible. */
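/* For instance, the one-dimensional slices A (5 .. 4) and A (1 .. 0)
   are both null arrays and therefore compare equal, even though their
   bounds differ. */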
255
256 static tree
257 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
258 {
259 tree result = convert (result_type, boolean_true_node);
260 tree a1_is_null = convert (result_type, boolean_false_node);
261 tree a2_is_null = convert (result_type, boolean_false_node);
262 tree t1 = TREE_TYPE (a1);
263 tree t2 = TREE_TYPE (a2);
264 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
265 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
266 bool length_zero_p = false;
267
268 /* If the operands have side-effects, they need to be evaluated only once
269 in spite of the multiple references in the comparison. */
270 if (a1_side_effects_p)
271 a1 = gnat_protect_expr (a1);
272
273 if (a2_side_effects_p)
274 a2 = gnat_protect_expr (a2);
275
276 /* Process each dimension separately and compare the lengths. If any
277 dimension has a length known to be zero, set LENGTH_ZERO_P to true
278 in order to suppress the comparison of the data at the end. */
279 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
280 {
281 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
282 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
283 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
284 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
285 tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
286 size_one_node);
287 tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
288 size_one_node);
289 tree comparison, this_a1_is_null, this_a2_is_null;
290
291 /* If the length of the first array is a constant, swap our operands
292 unless the length of the second array is the constant zero. */
293 if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
294 {
295 tree tem;
296 bool btem;
297
298 tem = a1, a1 = a2, a2 = tem;
299 tem = t1, t1 = t2, t2 = tem;
300 tem = lb1, lb1 = lb2, lb2 = tem;
301 tem = ub1, ub1 = ub2, ub2 = tem;
302 tem = length1, length1 = length2, length2 = tem;
303 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
304 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
305 a2_side_effects_p = btem;
306 }
307
308 /* If the length of the second array is the constant zero, we can just
309 use the original stored bounds for the first array and see whether
310 last < first holds. */
311 if (integer_zerop (length2))
312 {
313 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
314
315 length_zero_p = true;
316
317 ub1
318 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
319 lb1
320 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
321
322 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
323 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
324 if (EXPR_P (comparison))
325 SET_EXPR_LOCATION (comparison, loc);
326
327 this_a1_is_null = comparison;
328 this_a2_is_null = convert (result_type, boolean_true_node);
329 }
330
331 /* Otherwise, if the length is some other constant value, we know that
332 this dimension in the second array cannot be superflat, so we can
333 just use its length computed from the actual stored bounds. */
334 else if (TREE_CODE (length2) == INTEGER_CST)
335 {
336 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
337
338 ub1
339 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
340 lb1
341 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
342 /* Note that we know that UB2 and LB2 are constant and hence
343 cannot contain a PLACEHOLDER_EXPR. */
344 ub2
345 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
346 lb2
347 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
348
349 comparison
350 = fold_build2_loc (loc, EQ_EXPR, result_type,
351 build_binary_op (MINUS_EXPR, b, ub1, lb1),
352 build_binary_op (MINUS_EXPR, b, ub2, lb2));
353 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
354 if (EXPR_P (comparison))
355 SET_EXPR_LOCATION (comparison, loc);
356
357 this_a1_is_null
358 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
359
360 this_a2_is_null = convert (result_type, boolean_false_node);
361 }
362
363 /* Otherwise, compare the computed lengths. */
364 else
365 {
366 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
367 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
368
369 comparison
370 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
371
372 /* If the length expression is of the form (cond ? val : 0), assume
373 that cond is equivalent to (length != 0). That's guaranteed by
374 construction of the array types in gnat_to_gnu_entity. */
375 if (TREE_CODE (length1) == COND_EXPR
376 && integer_zerop (TREE_OPERAND (length1, 2)))
377 this_a1_is_null
378 = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
379 else
380 this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
381 length1, size_zero_node);
382
383 /* Likewise for the second array. */
384 if (TREE_CODE (length2) == COND_EXPR
385 && integer_zerop (TREE_OPERAND (length2, 2)))
386 this_a2_is_null
387 = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
388 else
389 this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
390 length2, size_zero_node);
391 }
392
393 /* Append expressions for this dimension to the final expressions. */
394 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
395 result, comparison);
396
397 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
398 this_a1_is_null, a1_is_null);
399
400 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
401 this_a2_is_null, a2_is_null);
402
403 t1 = TREE_TYPE (t1);
404 t2 = TREE_TYPE (t2);
405 }
406
407 /* Unless the length of some dimension is known to be zero, compare the
408 data in the array. */
409 if (!length_zero_p)
410 {
411 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
412 tree comparison;
413
414 if (type)
415 {
416 	  a1 = convert (type, a1);
417 	  a2 = convert (type, a2);
418 }
419
420 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
421
422 result
423 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
424 }
425
426 /* The result is also true if both sizes are zero. */
427 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
428 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
429 a1_is_null, a2_is_null),
430 result);
431
432 /* If the operands have side-effects, they need to be evaluated before
433 doing the tests above since the place they otherwise would end up
434 being evaluated at run time could be wrong. */
435 if (a1_side_effects_p)
436 result = build2 (COMPOUND_EXPR, result_type, a1, result);
437
438 if (a2_side_effects_p)
439 result = build2 (COMPOUND_EXPR, result_type, a2, result);
440
441 return result;
442 }
443
444 /* Return an expression tree representing an equality comparison of P1 and P2,
445 two objects of fat pointer type. The result should be of type RESULT_TYPE.
446
447 Two fat pointers are equal in one of two ways: (1) if both have a null
448    pointer to the array or (2) if they contain the same pair of pointers.
449 We perform the comparison in as efficient a manner as possible. */
450
451 static tree
452 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
453 {
454 tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
455 tree p1_array_is_null, p2_array_is_null;
456
457   /* If either operand has side-effects, it has to be evaluated only once
458      in spite of the multiple references to it in the comparison.  */
459 p1 = gnat_protect_expr (p1);
460 p2 = gnat_protect_expr (p2);
461
462 /* The constant folder doesn't fold fat pointer types so we do it here. */
463 if (TREE_CODE (p1) == CONSTRUCTOR)
464 p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
465 else
466 p1_array = build_component_ref (p1, NULL_TREE,
467 TYPE_FIELDS (TREE_TYPE (p1)), true);
468
469 p1_array_is_null
470 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
471 fold_convert_loc (loc, TREE_TYPE (p1_array),
472 null_pointer_node));
473
474 if (TREE_CODE (p2) == CONSTRUCTOR)
475 p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
476 else
477 p2_array = build_component_ref (p2, NULL_TREE,
478 TYPE_FIELDS (TREE_TYPE (p2)), true);
479
480 p2_array_is_null
481 = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
482 fold_convert_loc (loc, TREE_TYPE (p2_array),
483 null_pointer_node));
484
485 /* If one of the pointers to the array is null, just compare the other. */
486 if (integer_zerop (p1_array))
487 return p2_array_is_null;
488 else if (integer_zerop (p2_array))
489 return p1_array_is_null;
490
491 /* Otherwise, do the fully-fledged comparison. */
492 same_array
493 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
494
495 if (TREE_CODE (p1) == CONSTRUCTOR)
496 p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
497 else
498 p1_bounds
499 = build_component_ref (p1, NULL_TREE,
500 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);
501
502 if (TREE_CODE (p2) == CONSTRUCTOR)
503 p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
504 else
505 p2_bounds
506 = build_component_ref (p2, NULL_TREE,
507 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);
508
509 same_bounds
510 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
511
512 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
513 return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
514 build_binary_op (TRUTH_ORIF_EXPR, result_type,
515 p1_array_is_null, same_bounds));
516 }
517 \f
518 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
519 type TYPE. We know that TYPE is a modular type with a nonbinary
520 modulus. */
521
522 static tree
523 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
524 tree rhs)
525 {
526 tree modulus = TYPE_MODULUS (type);
527 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
528 unsigned int precision;
529 bool unsignedp = true;
530 tree op_type = type;
531 tree result;
532
533 /* If this is an addition of a constant, convert it to a subtraction
534 of a constant since we can do that faster. */
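/* For instance, with a modulus of 7, LHS + 5 becomes LHS - 2; the
   subtraction path below then adds the modulus back whenever the
   result is negative. */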
535 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
536 {
537 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
538 op_code = MINUS_EXPR;
539 }
540
541 /* For the logical operations, we only need PRECISION bits. For
542 addition and subtraction, we need one more and for multiplication we
543 need twice as many. But we never want to make a size smaller than
544 our size. */
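/* For instance, a modulus of 10 needs 4 bits for the values 0 .. 9; an
   addition can reach 9 + 9 = 18, which needs 5 bits, and a
   multiplication can reach 9 * 9 = 81, which the doubled 8 bits cover. */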
545 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
546 needed_precision += 1;
547 else if (op_code == MULT_EXPR)
548 needed_precision *= 2;
549
550 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
551
552 /* Unsigned will do for everything but subtraction. */
553 if (op_code == MINUS_EXPR)
554 unsignedp = false;
555
556 /* If our type is the wrong signedness or isn't wide enough, make a new
557 type and convert both our operands to it. */
558 if (TYPE_PRECISION (op_type) < precision
559 || TYPE_UNSIGNED (op_type) != unsignedp)
560 {
561 /* Copy the node so we ensure it can be modified to make it modular. */
562 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
563 modulus = convert (op_type, modulus);
564 SET_TYPE_MODULUS (op_type, modulus);
565 TYPE_MODULAR_P (op_type) = 1;
566 lhs = convert (op_type, lhs);
567 rhs = convert (op_type, rhs);
568 }
569
570 /* Do the operation, then we'll fix it up. */
571 result = fold_build2 (op_code, op_type, lhs, rhs);
572
573 /* For multiplication, we have no choice but to do a full modulus
574 operation. However, we want to do this in the narrowest
575 possible size. */
576 if (op_code == MULT_EXPR)
577 {
578 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
579 modulus = convert (div_type, modulus);
580 SET_TYPE_MODULUS (div_type, modulus);
581 TYPE_MODULAR_P (div_type) = 1;
582 result = convert (op_type,
583 fold_build2 (TRUNC_MOD_EXPR, div_type,
584 convert (div_type, result), modulus));
585 }
586
587 /* For subtraction, add the modulus back if we are negative. */
588 else if (op_code == MINUS_EXPR)
589 {
590 result = gnat_protect_expr (result);
591 result = fold_build3 (COND_EXPR, op_type,
592 fold_build2 (LT_EXPR, boolean_type_node, result,
593 convert (op_type, integer_zero_node)),
594 fold_build2 (PLUS_EXPR, op_type, result, modulus),
595 result);
596 }
597
598 /* For the other operations, subtract the modulus if we are >= it. */
599 else
600 {
601 result = gnat_protect_expr (result);
602 result = fold_build3 (COND_EXPR, op_type,
603 fold_build2 (GE_EXPR, boolean_type_node,
604 result, modulus),
605 fold_build2 (MINUS_EXPR, op_type,
606 result, modulus),
607 result);
608 }
609
610 return convert (type, result);
611 }
612 \f
613 /* This page contains routines that implement the Ada semantics with regard
614 to atomic objects. They are fully piggybacked on the middle-end support
615 for atomic loads and stores.
616
617 *** Memory barriers and volatile objects ***
618
619 We implement the weakened form of the C.6(16) clause that was introduced
620 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
621 implementable without significant performance hits on modern platforms.
622
623 We also take advantage of the requirements imposed on shared variables by
624 9.10 (conditions for sequential actions) to have non-erroneous execution
625    and consider that C.6(16) and C.6(17) only prescribe a uniform order of
626 volatile updates with regard to sequential actions, i.e. with regard to
627 reads or updates of atomic objects.
628
629 As such, an update of an atomic object by a task requires that all earlier
630 accesses to volatile objects have completed. Similarly, later accesses to
631 volatile objects cannot be reordered before the update of the atomic object.
632 So, memory barriers both before and after the atomic update are needed.
633
634 For a read of an atomic object, to avoid seeing writes of volatile objects
635 by a task earlier than by the other tasks, a memory barrier is needed before
636 the atomic read. Finally, to avoid reordering later reads or updates of
637 volatile objects to before the atomic read, a barrier is needed after the
638 atomic read.
639
640 So, memory barriers are needed before and after atomic reads and updates.
641 And, in order to simplify the implementation, we use full memory barriers
642 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
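/* Concretely, this means that a synchronized access expands into a call
   to one of the __atomic_load_N / __atomic_store_N built-ins with the
   MEMMODEL_SEQ_CST memory model, as done by build_atomic_load and
   build_atomic_store below. */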
643
644 /* Return the size of TYPE, which must be a positive power of 2. */
645
646 static unsigned int
647 resolve_atomic_size (tree type)
648 {
649 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
650
651 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
652 return size;
653
654 /* We shouldn't reach here without having already detected that the size
655 isn't compatible with an atomic access. */
656 gcc_assert (Serious_Errors_Detected);
657
658 return 0;
659 }
660
661 /* Build an atomic load for the underlying atomic object in SRC. SYNC is
662 true if the load requires synchronization. */
663
664 tree
665 build_atomic_load (tree src, bool sync)
666 {
667 tree ptr_type
668 = build_pointer_type
669 (build_qualified_type (void_type_node,
670 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
671 tree mem_model
672 = build_int_cst (integer_type_node,
673 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
674 tree orig_src = src;
675 tree t, addr, val;
676 unsigned int size;
677 int fncode;
678
679 /* Remove conversions to get the address of the underlying object. */
680 src = remove_conversions (src, false);
681 size = resolve_atomic_size (TREE_TYPE (src));
682 if (size == 0)
683 return orig_src;
684
685 fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
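  /* For instance, for a 4-byte object, exact_log2 (4) + 1 = 3 selects
     BUILT_IN_ATOMIC_LOAD_4, the third size-specific variant after
     BUILT_IN_ATOMIC_LOAD_N. */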
686 t = builtin_decl_implicit ((enum built_in_function) fncode);
687
688 addr = build_unary_op (ADDR_EXPR, ptr_type, src);
689 val = build_call_expr (t, 2, addr, mem_model);
690
691 /* First reinterpret the loaded bits in the original type of the load,
692 then convert to the expected result type. */
693 t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
694 return convert (TREE_TYPE (orig_src), t);
695 }
696
697 /* Build an atomic store from SRC to the underlying atomic object in DEST.
698 SYNC is true if the store requires synchronization. */
699
700 tree
701 build_atomic_store (tree dest, tree src, bool sync)
702 {
703 tree ptr_type
704 = build_pointer_type
705 (build_qualified_type (void_type_node,
706 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
707 tree mem_model
708 = build_int_cst (integer_type_node,
709 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
710 tree orig_dest = dest;
711 tree t, int_type, addr;
712 unsigned int size;
713 int fncode;
714
715 /* Remove conversions to get the address of the underlying object. */
716 dest = remove_conversions (dest, false);
717 size = resolve_atomic_size (TREE_TYPE (dest));
718 if (size == 0)
719 return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
720
721 fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
722 t = builtin_decl_implicit ((enum built_in_function) fncode);
723 int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
724
725 /* First convert the bits to be stored to the original type of the store,
726 then reinterpret them in the effective type. But if the original type
727 is a padded type with the same size, convert to the inner type instead,
728 as we don't want to artificially introduce a CONSTRUCTOR here. */
729 if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
730 && TYPE_SIZE (TREE_TYPE (dest))
731 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
732 src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
733 else
734 src = convert (TREE_TYPE (dest), src);
735 src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
736 addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
737
738 return build_call_expr (t, 3, addr, src, mem_model);
739 }
740
741 /* Build a load-modify-store sequence from SRC to DEST. GNAT_NODE is used for
742 the location of the sequence. Note that, even though the load and the store
743 are both atomic, the sequence itself is not atomic. */
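/* As a rough sketch, for an assignment OBJ.C := VAL where OBJ is an
   atomic object, the generated sequence is:
     TEMP := atomic load of OBJ;  TEMP.C := VAL;  atomic store of TEMP
   back into OBJ. */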
744
745 tree
746 build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
747 {
748 /* We will be modifying DEST below so we build a copy. */
749 dest = copy_node (dest);
750 tree ref = dest;
751
752 while (handled_component_p (ref))
753 {
754 /* The load should already have been generated during the translation
755 	 of the GNAT destination tree; find it in the GNU tree.  */
756 if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
757 {
758 tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
759 if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
760 {
761 tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
762 tree t = CALL_EXPR_ARG (op, 0);
763 tree obj, temp, stmt;
764
765 	      /* Find the loaded object.  */
766 if (TREE_CODE (t) == NOP_EXPR)
767 t = TREE_OPERAND (t, 0);
768 if (TREE_CODE (t) == ADDR_EXPR)
769 obj = TREE_OPERAND (t, 0);
770 else
771 obj = build1 (INDIRECT_REF, type, t);
772
773 /* Drop atomic and volatile qualifiers for the temporary. */
774 type = TYPE_MAIN_VARIANT (type);
775
776 /* And drop BLKmode, if need be, to put it into a register. */
777 if (TYPE_MODE (type) == BLKmode)
778 {
779 unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
780 type = copy_type (type);
781 SET_TYPE_MODE (type, mode_for_size (size, MODE_INT, 0));
782 }
783
784 /* Create the temporary by inserting a SAVE_EXPR. */
785 temp = build1 (SAVE_EXPR, type,
786 build1 (VIEW_CONVERT_EXPR, type, op));
787 TREE_OPERAND (ref, 0) = temp;
788
789 start_stmt_group ();
790
791 /* Build the modify of the temporary. */
792 stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
793 add_stmt_with_node (stmt, gnat_node);
794
795 /* Build the store to the object. */
796 stmt = build_atomic_store (obj, temp, false);
797 add_stmt_with_node (stmt, gnat_node);
798
799 return end_stmt_group ();
800 }
801 }
802
803 TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
804 ref = TREE_OPERAND (ref, 0);
805 }
806
807 /* Something went wrong earlier if we have not found the atomic load. */
808 gcc_unreachable ();
809 }
810 \f
811 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
812 desired for the result. Usually the operation is to be performed
813 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
814 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
815 case the type to be used will be derived from the operands.
816
817 This function is very much unlike the ones for C and C++ since we
818 have already done any type conversion and matching required. All we
819 have to do here is validate the work done by SEM and handle subtypes. */
820
821 tree
822 build_binary_op (enum tree_code op_code, tree result_type,
823 tree left_operand, tree right_operand)
824 {
825 tree left_type = TREE_TYPE (left_operand);
826 tree right_type = TREE_TYPE (right_operand);
827 tree left_base_type = get_base_type (left_type);
828 tree right_base_type = get_base_type (right_type);
829 tree operation_type = result_type;
830 tree best_type = NULL_TREE;
831 tree modulus, result;
832 bool has_side_effects = false;
833
834 if (operation_type
835 && TREE_CODE (operation_type) == RECORD_TYPE
836 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
837 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
838
839 if (operation_type
840 && TREE_CODE (operation_type) == INTEGER_TYPE
841 && TYPE_EXTRA_SUBTYPE_P (operation_type))
842 operation_type = get_base_type (operation_type);
843
844 modulus = (operation_type
845 && TREE_CODE (operation_type) == INTEGER_TYPE
846 && TYPE_MODULAR_P (operation_type)
847 ? TYPE_MODULUS (operation_type) : NULL_TREE);
848
849 switch (op_code)
850 {
851 case INIT_EXPR:
852 case MODIFY_EXPR:
853 #ifdef ENABLE_CHECKING
854 gcc_assert (result_type == NULL_TREE);
855 #endif
856 /* If there were integral or pointer conversions on the LHS, remove
857 them; we'll be putting them back below if needed. Likewise for
858 conversions between array and record types, except for justified
859 modular types. But don't do this if the right operand is not
860 BLKmode (for packed arrays) unless we are not changing the mode. */
861 while ((CONVERT_EXPR_P (left_operand)
862 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
863 && (((INTEGRAL_TYPE_P (left_type)
864 || POINTER_TYPE_P (left_type))
865 && (INTEGRAL_TYPE_P (TREE_TYPE
866 (TREE_OPERAND (left_operand, 0)))
867 || POINTER_TYPE_P (TREE_TYPE
868 (TREE_OPERAND (left_operand, 0)))))
869 || (((TREE_CODE (left_type) == RECORD_TYPE
870 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
871 || TREE_CODE (left_type) == ARRAY_TYPE)
872 && ((TREE_CODE (TREE_TYPE
873 (TREE_OPERAND (left_operand, 0)))
874 == RECORD_TYPE)
875 || (TREE_CODE (TREE_TYPE
876 (TREE_OPERAND (left_operand, 0)))
877 == ARRAY_TYPE))
878 && (TYPE_MODE (right_type) == BLKmode
879 || (TYPE_MODE (left_type)
880 == TYPE_MODE (TREE_TYPE
881 (TREE_OPERAND
882 (left_operand, 0))))))))
883 {
884 left_operand = TREE_OPERAND (left_operand, 0);
885 left_type = TREE_TYPE (left_operand);
886 }
887
888 /* If a class-wide type may be involved, force use of the RHS type. */
889 if ((TREE_CODE (right_type) == RECORD_TYPE
890 || TREE_CODE (right_type) == UNION_TYPE)
891 && TYPE_ALIGN_OK (right_type))
892 operation_type = right_type;
893
894 /* If we are copying between padded objects with compatible types, use
895 	 the padded view of the objects; this is very likely more efficient.
896 Likewise for a padded object that is assigned a constructor, if we
897 can convert the constructor to the inner type, to avoid putting a
898 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
899 actually copied anything. */
900 else if (TYPE_IS_PADDING_P (left_type)
901 && TREE_CONSTANT (TYPE_SIZE (left_type))
902 && ((TREE_CODE (right_operand) == COMPONENT_REF
903 && TYPE_MAIN_VARIANT (left_type)
904 == TYPE_MAIN_VARIANT
905 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
906 || (TREE_CODE (right_operand) == CONSTRUCTOR
907 && !CONTAINS_PLACEHOLDER_P
908 (DECL_SIZE (TYPE_FIELDS (left_type)))))
909 && !integer_zerop (TYPE_SIZE (right_type)))
910 {
911 /* We make an exception for a BLKmode type padding a non-BLKmode
912 inner type and do the conversion of the LHS right away, since
913 unchecked_convert wouldn't do it properly. */
914 if (TYPE_MODE (left_type) == BLKmode
915 && TYPE_MODE (right_type) != BLKmode
916 && TREE_CODE (right_operand) != CONSTRUCTOR)
917 {
918 operation_type = right_type;
919 left_operand = convert (operation_type, left_operand);
920 left_type = operation_type;
921 }
922 else
923 operation_type = left_type;
924 }
925
926 /* If we have a call to a function that returns an unconstrained type
927 with default discriminant on the RHS, use the RHS type (which is
928 padded) as we cannot compute the size of the actual assignment. */
929 else if (TREE_CODE (right_operand) == CALL_EXPR
930 && TYPE_IS_PADDING_P (right_type)
931 && CONTAINS_PLACEHOLDER_P
932 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
933 operation_type = right_type;
934
935 /* Find the best type to use for copying between aggregate types. */
936 else if (((TREE_CODE (left_type) == ARRAY_TYPE
937 && TREE_CODE (right_type) == ARRAY_TYPE)
938 || (TREE_CODE (left_type) == RECORD_TYPE
939 && TREE_CODE (right_type) == RECORD_TYPE))
940 && (best_type = find_common_type (left_type, right_type)))
941 operation_type = best_type;
942
943 /* Otherwise use the LHS type. */
944 else
945 operation_type = left_type;
946
947 /* Ensure everything on the LHS is valid. If we have a field reference,
948 strip anything that get_inner_reference can handle. Then remove any
949 conversions between types having the same code and mode. And mark
950 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
951 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node. */
952 result = left_operand;
953 while (true)
954 {
955 tree restype = TREE_TYPE (result);
956
957 if (TREE_CODE (result) == COMPONENT_REF
958 || TREE_CODE (result) == ARRAY_REF
959 || TREE_CODE (result) == ARRAY_RANGE_REF)
960 while (handled_component_p (result))
961 result = TREE_OPERAND (result, 0);
962 else if (TREE_CODE (result) == REALPART_EXPR
963 || TREE_CODE (result) == IMAGPART_EXPR
964 || (CONVERT_EXPR_P (result)
965 && (((TREE_CODE (restype)
966 == TREE_CODE (TREE_TYPE
967 (TREE_OPERAND (result, 0))))
968 && (TYPE_MODE (TREE_TYPE
969 (TREE_OPERAND (result, 0)))
970 == TYPE_MODE (restype)))
971 || TYPE_ALIGN_OK (restype))))
972 result = TREE_OPERAND (result, 0);
973 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
974 {
975 TREE_ADDRESSABLE (result) = 1;
976 result = TREE_OPERAND (result, 0);
977 }
978 else
979 break;
980 }
981
982 gcc_assert (TREE_CODE (result) == INDIRECT_REF
983 || TREE_CODE (result) == NULL_EXPR
984 || TREE_CODE (result) == SAVE_EXPR
985 || DECL_P (result));
986
987 /* Convert the right operand to the operation type unless it is
988 either already of the correct type or if the type involves a
989 placeholder, since the RHS may not have the same record type. */
990 if (operation_type != right_type
991 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
992 {
993 right_operand = convert (operation_type, right_operand);
994 right_type = operation_type;
995 }
996
997 /* If the left operand is not of the same type as the operation
998 type, wrap it up in a VIEW_CONVERT_EXPR. */
999 if (left_type != operation_type)
1000 left_operand = unchecked_convert (operation_type, left_operand, false);
1001
1002 has_side_effects = true;
1003 modulus = NULL_TREE;
1004 break;
1005
1006 case ARRAY_REF:
1007 if (!operation_type)
1008 operation_type = TREE_TYPE (left_type);
1009
1010 /* ... fall through ... */
1011
1012 case ARRAY_RANGE_REF:
1013 /* First look through conversion between type variants. Note that
1014 this changes neither the operation type nor the type domain. */
1015 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
1016 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
1017 == TYPE_MAIN_VARIANT (left_type))
1018 {
1019 left_operand = TREE_OPERAND (left_operand, 0);
1020 left_type = TREE_TYPE (left_operand);
1021 }
1022
1023 /* For a range, make sure the element type is consistent. */
1024 if (op_code == ARRAY_RANGE_REF
1025 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
1026 operation_type = build_array_type (TREE_TYPE (left_type),
1027 TYPE_DOMAIN (operation_type));
1028
1029 /* Then convert the right operand to its base type. This will prevent
1030 unneeded sign conversions when sizetype is wider than integer. */
1031 right_operand = convert (right_base_type, right_operand);
1032 right_operand = convert_to_index_type (right_operand);
1033 modulus = NULL_TREE;
1034 break;
1035
1036 case TRUTH_ANDIF_EXPR:
1037 case TRUTH_ORIF_EXPR:
1038 case TRUTH_AND_EXPR:
1039 case TRUTH_OR_EXPR:
1040 case TRUTH_XOR_EXPR:
1041 #ifdef ENABLE_CHECKING
1042 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1043 #endif
1044 operation_type = left_base_type;
1045 left_operand = convert (operation_type, left_operand);
1046 right_operand = convert (operation_type, right_operand);
1047 break;
1048
1049 case GE_EXPR:
1050 case LE_EXPR:
1051 case GT_EXPR:
1052 case LT_EXPR:
1053 case EQ_EXPR:
1054 case NE_EXPR:
1055 #ifdef ENABLE_CHECKING
1056 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1057 #endif
1058 /* If either operand is a NULL_EXPR, just return a new one. */
1059 if (TREE_CODE (left_operand) == NULL_EXPR)
1060 return build2 (op_code, result_type,
1061 build1 (NULL_EXPR, integer_type_node,
1062 TREE_OPERAND (left_operand, 0)),
1063 integer_zero_node);
1064
1065 else if (TREE_CODE (right_operand) == NULL_EXPR)
1066 return build2 (op_code, result_type,
1067 build1 (NULL_EXPR, integer_type_node,
1068 TREE_OPERAND (right_operand, 0)),
1069 integer_zero_node);
1070
1071       /* If either object is a justified modular type, get the
1072 fields from within. */
1073 if (TREE_CODE (left_type) == RECORD_TYPE
1074 && TYPE_JUSTIFIED_MODULAR_P (left_type))
1075 {
1076 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
1077 left_operand);
1078 left_type = TREE_TYPE (left_operand);
1079 left_base_type = get_base_type (left_type);
1080 }
1081
1082 if (TREE_CODE (right_type) == RECORD_TYPE
1083 && TYPE_JUSTIFIED_MODULAR_P (right_type))
1084 {
1085 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
1086 right_operand);
1087 right_type = TREE_TYPE (right_operand);
1088 right_base_type = get_base_type (right_type);
1089 }
1090
1091 /* If both objects are arrays, compare them specially. */
1092 if ((TREE_CODE (left_type) == ARRAY_TYPE
1093 || (TREE_CODE (left_type) == INTEGER_TYPE
1094 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
1095 && (TREE_CODE (right_type) == ARRAY_TYPE
1096 || (TREE_CODE (right_type) == INTEGER_TYPE
1097 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1098 {
1099 result = compare_arrays (input_location,
1100 result_type, left_operand, right_operand);
1101 if (op_code == NE_EXPR)
1102 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1103 else
1104 gcc_assert (op_code == EQ_EXPR);
1105
1106 return result;
1107 }
1108
1109 /* Otherwise, the base types must be the same, unless they are both fat
1110 pointer types or record types. In the latter case, use the best type
1111 and convert both operands to that type. */
1112 if (left_base_type != right_base_type)
1113 {
1114 if (TYPE_IS_FAT_POINTER_P (left_base_type)
1115 && TYPE_IS_FAT_POINTER_P (right_base_type))
1116 {
1117 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1118 == TYPE_MAIN_VARIANT (right_base_type));
1119 best_type = left_base_type;
1120 }
1121
1122 else if (TREE_CODE (left_base_type) == RECORD_TYPE
1123 && TREE_CODE (right_base_type) == RECORD_TYPE)
1124 {
1125 /* The only way this is permitted is if both types have the same
1126 name. In that case, one of them must not be self-referential.
1127 Use it as the best type. Even better with a fixed size. */
1128 gcc_assert (TYPE_NAME (left_base_type)
1129 && TYPE_NAME (left_base_type)
1130 == TYPE_NAME (right_base_type));
1131
1132 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1133 best_type = left_base_type;
1134 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1135 best_type = right_base_type;
1136 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1137 best_type = left_base_type;
1138 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1139 best_type = right_base_type;
1140 else
1141 gcc_unreachable ();
1142 }
1143
1144 else if (POINTER_TYPE_P (left_base_type)
1145 && POINTER_TYPE_P (right_base_type))
1146 {
1147 gcc_assert (TREE_TYPE (left_base_type)
1148 == TREE_TYPE (right_base_type));
1149 best_type = left_base_type;
1150 }
1151 else
1152 gcc_unreachable ();
1153
1154 left_operand = convert (best_type, left_operand);
1155 right_operand = convert (best_type, right_operand);
1156 }
1157 else
1158 {
1159 left_operand = convert (left_base_type, left_operand);
1160 right_operand = convert (right_base_type, right_operand);
1161 }
1162
1163 /* If both objects are fat pointers, compare them specially. */
1164 if (TYPE_IS_FAT_POINTER_P (left_base_type))
1165 {
1166 result
1167 = compare_fat_pointers (input_location,
1168 result_type, left_operand, right_operand);
1169 if (op_code == NE_EXPR)
1170 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1171 else
1172 gcc_assert (op_code == EQ_EXPR);
1173
1174 return result;
1175 }
1176
1177 modulus = NULL_TREE;
1178 break;
1179
1180 case LSHIFT_EXPR:
1181 case RSHIFT_EXPR:
1182 case LROTATE_EXPR:
1183 case RROTATE_EXPR:
1184 /* The RHS of a shift can be any type. Also, ignore any modulus
1185 (we used to abort, but this is needed for unchecked conversion
1186 to modular types). Otherwise, processing is the same as normal. */
1187 gcc_assert (operation_type == left_base_type);
1188 modulus = NULL_TREE;
1189 left_operand = convert (operation_type, left_operand);
1190 break;
1191
1192 case BIT_AND_EXPR:
1193 case BIT_IOR_EXPR:
1194 case BIT_XOR_EXPR:
1195 /* For binary modulus, if the inputs are in range, so are the
1196 outputs. */
1197 if (modulus && integer_pow2p (modulus))
1198 modulus = NULL_TREE;
1199 goto common;
1200
1201 case COMPLEX_EXPR:
1202 gcc_assert (TREE_TYPE (result_type) == left_base_type
1203 && TREE_TYPE (result_type) == right_base_type);
1204 left_operand = convert (left_base_type, left_operand);
1205 right_operand = convert (right_base_type, right_operand);
1206 break;
1207
1208 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1209 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1210 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1211 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1212 /* These always produce results lower than either operand. */
1213 modulus = NULL_TREE;
1214 goto common;
1215
1216 case POINTER_PLUS_EXPR:
1217 gcc_assert (operation_type == left_base_type
1218 && sizetype == right_base_type);
1219 left_operand = convert (operation_type, left_operand);
1220 right_operand = convert (sizetype, right_operand);
1221 break;
1222
1223 case PLUS_NOMOD_EXPR:
1224 case MINUS_NOMOD_EXPR:
1225 if (op_code == PLUS_NOMOD_EXPR)
1226 op_code = PLUS_EXPR;
1227 else
1228 op_code = MINUS_EXPR;
1229 modulus = NULL_TREE;
1230
1231 /* ... fall through ... */
1232
1233 case PLUS_EXPR:
1234 case MINUS_EXPR:
1235       /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1236 	 other compilers.  Contrary to C, Ada doesn't allow arithmetic in
1237 	 these types but can generate addition/subtraction for Succ/Pred.  */
1238 if (operation_type
1239 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1240 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1241 operation_type = left_base_type = right_base_type
1242 = gnat_type_for_mode (TYPE_MODE (operation_type),
1243 TYPE_UNSIGNED (operation_type));
1244
1245 /* ... fall through ... */
1246
1247 default:
1248 common:
1249       /* The result type should be the same as the base types of both
1250 	 operands (and they should be the same).  Convert
1251 everything to the result type. */
1252
1253 gcc_assert (operation_type == left_base_type
1254 && left_base_type == right_base_type);
1255 left_operand = convert (operation_type, left_operand);
1256 right_operand = convert (operation_type, right_operand);
1257 }
1258
1259 if (modulus && !integer_pow2p (modulus))
1260 {
1261 result = nonbinary_modular_operation (op_code, operation_type,
1262 left_operand, right_operand);
1263 modulus = NULL_TREE;
1264 }
1265 /* If either operand is a NULL_EXPR, just return a new one. */
1266 else if (TREE_CODE (left_operand) == NULL_EXPR)
1267 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1268 else if (TREE_CODE (right_operand) == NULL_EXPR)
1269 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1270 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1271 result = fold (build4 (op_code, operation_type, left_operand,
1272 right_operand, NULL_TREE, NULL_TREE));
1273 else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1274 result = build2 (op_code, void_type_node, left_operand, right_operand);
1275 else
1276 result
1277 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1278
1279 if (TREE_CONSTANT (result))
1280 ;
1281 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1282 {
1283 if (TYPE_VOLATILE (operation_type))
1284 TREE_THIS_VOLATILE (result) = 1;
1285 }
1286 else
1287 TREE_CONSTANT (result)
1288 |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1289
1290 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1291
1292 /* If we are working with modular types, perform the MOD operation
1293 if something above hasn't eliminated the need for it. */
1294 if (modulus)
1295 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1296 convert (operation_type, modulus));
1297
1298 if (result_type && result_type != operation_type)
1299 result = convert (result_type, result);
1300
1301 return result;
1302 }
1303 \f
1304 /* Similar, but for unary operations. */
1305
1306 tree
1307 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1308 {
1309 tree type = TREE_TYPE (operand);
1310 tree base_type = get_base_type (type);
1311 tree operation_type = result_type;
1312 tree result;
1313
1314 if (operation_type
1315 && TREE_CODE (operation_type) == RECORD_TYPE
1316 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1317 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1318
1319 if (operation_type
1320 && TREE_CODE (operation_type) == INTEGER_TYPE
1321 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1322 operation_type = get_base_type (operation_type);
1323
1324 switch (op_code)
1325 {
1326 case REALPART_EXPR:
1327 case IMAGPART_EXPR:
1328 if (!operation_type)
1329 result_type = operation_type = TREE_TYPE (type);
1330 else
1331 gcc_assert (result_type == TREE_TYPE (type));
1332
1333 result = fold_build1 (op_code, operation_type, operand);
1334 break;
1335
1336 case TRUTH_NOT_EXPR:
1337 #ifdef ENABLE_CHECKING
1338 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1339 #endif
1340 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1341 /* When not optimizing, fold the result as invert_truthvalue_loc
1342 doesn't fold the result of comparisons. This is intended to undo
1343 the trick used for boolean rvalues in gnat_to_gnu. */
1344 if (!optimize)
1345 result = fold (result);
1346 break;
1347
1348 case ATTR_ADDR_EXPR:
1349 case ADDR_EXPR:
1350 switch (TREE_CODE (operand))
1351 {
1352 case INDIRECT_REF:
1353 case UNCONSTRAINED_ARRAY_REF:
1354 result = TREE_OPERAND (operand, 0);
1355
1356 /* Make sure the type here is a pointer, not a reference.
1357 GCC wants pointer types for function addresses. */
1358 if (!result_type)
1359 result_type = build_pointer_type (type);
1360
1361 /* If the underlying object can alias everything, propagate the
1362 property since we are effectively retrieving the object. */
1363 if (POINTER_TYPE_P (TREE_TYPE (result))
1364 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1365 {
1366 if (TREE_CODE (result_type) == POINTER_TYPE
1367 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1368 result_type
1369 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1370 TYPE_MODE (result_type),
1371 true);
1372 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1373 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1374 result_type
1375 = build_reference_type_for_mode (TREE_TYPE (result_type),
1376 TYPE_MODE (result_type),
1377 true);
1378 }
1379 break;
1380
1381 case NULL_EXPR:
1382 result = operand;
1383 TREE_TYPE (result) = type = build_pointer_type (type);
1384 break;
1385
1386 case COMPOUND_EXPR:
1387 /* Fold a compound expression if it has unconstrained array type
1388 	     since the middle-end cannot handle it.  But we don't do it in the
1389 	     general case because it may introduce aliasing issues if the
1390 	     first operand is an indirect assignment and the second operand
1391 	     is the corresponding address, e.g. for an allocator.  */
1392 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
1393 {
1394 result = build_unary_op (ADDR_EXPR, result_type,
1395 TREE_OPERAND (operand, 1));
1396 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1397 TREE_OPERAND (operand, 0), result);
1398 break;
1399 }
1400 goto common;
1401
1402 case ARRAY_REF:
1403 case ARRAY_RANGE_REF:
1404 case COMPONENT_REF:
1405 case BIT_FIELD_REF:
1406 /* If this is for 'Address, find the address of the prefix and add
1407 the offset to the field. Otherwise, do this the normal way. */
1408 if (op_code == ATTR_ADDR_EXPR)
1409 {
1410 HOST_WIDE_INT bitsize;
1411 HOST_WIDE_INT bitpos;
1412 tree offset, inner;
1413 machine_mode mode;
1414 int unsignedp, volatilep;
1415
1416 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1417 &mode, &unsignedp, &volatilep,
1418 false);
1419
1420 /* If INNER is a padding type whose field has a self-referential
1421 size, convert to that inner type. We know the offset is zero
1422 and we need to have that type visible. */
1423 if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
1424 && CONTAINS_PLACEHOLDER_P
1425 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1426 (TREE_TYPE (inner))))))
1427 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1428 inner);
1429
1430 /* Compute the offset as a byte offset from INNER. */
1431 if (!offset)
1432 offset = size_zero_node;
1433
1434 offset = size_binop (PLUS_EXPR, offset,
1435 size_int (bitpos / BITS_PER_UNIT));
1436
1437 /* Take the address of INNER, convert it to a pointer to our type
1438 and add the offset. */
1439 inner = build_unary_op (ADDR_EXPR,
1440 build_pointer_type (TREE_TYPE (operand)),
1441 inner);
1442 result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
1443 inner, offset);
1444 break;
1445 }
1446 goto common;
1447
1448 case CONSTRUCTOR:
1449 /* If this is just a constructor for a padded record, we can
1450 just take the address of the single field and convert it to
1451 a pointer to our type. */
1452 if (TYPE_IS_PADDING_P (type))
1453 {
1454 result
1455 = build_unary_op (ADDR_EXPR,
1456 build_pointer_type (TREE_TYPE (operand)),
1457 CONSTRUCTOR_ELT (operand, 0)->value);
1458 break;
1459 }
1460 goto common;
1461
1462 case NOP_EXPR:
1463 if (AGGREGATE_TYPE_P (type)
1464 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1465 return build_unary_op (ADDR_EXPR, result_type,
1466 TREE_OPERAND (operand, 0));
1467
1468 /* ... fallthru ... */
1469
1470 case VIEW_CONVERT_EXPR:
1471 	  /* If this is just a variant conversion or if the conversion doesn't
1472 change the mode, get the result type from this type and go down.
1473 This is needed for conversions of CONST_DECLs, to eventually get
1474 to the address of their CORRESPONDING_VARs. */
1475 if ((TYPE_MAIN_VARIANT (type)
1476 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1477 || (TYPE_MODE (type) != BLKmode
1478 && (TYPE_MODE (type)
1479 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1480 return build_unary_op (ADDR_EXPR,
1481 (result_type ? result_type
1482 : build_pointer_type (type)),
1483 TREE_OPERAND (operand, 0));
1484 goto common;
1485
1486 case CONST_DECL:
1487 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1488
1489 /* ... fall through ... */
1490
1491 default:
1492 common:
1493
1494 /* If we are taking the address of a padded record whose field
1495 contains a template, take the address of the field. */
1496 if (TYPE_IS_PADDING_P (type)
1497 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1498 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1499 {
1500 type = TREE_TYPE (TYPE_FIELDS (type));
1501 operand = convert (type, operand);
1502 }
1503
1504 gnat_mark_addressable (operand);
1505 result = build_fold_addr_expr (operand);
1506 }
1507
1508 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1509 break;
1510
1511 case INDIRECT_REF:
1512 {
1513 tree t = remove_conversions (operand, false);
1514 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1515
1516 /* If TYPE is a thin pointer, either first retrieve the base if this
1517 is an expression with an offset built for the initialization of an
1518 object with an unconstrained nominal subtype, or else convert to
1519 the fat pointer. */
1520 if (TYPE_IS_THIN_POINTER_P (type))
1521 {
1522 tree rec_type = TREE_TYPE (type);
1523
1524 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1525 && TREE_OPERAND (operand, 1)
1526 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1527 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1528 {
1529 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1530 type = TREE_TYPE (operand);
1531 }
1532 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1533 {
1534 operand
1535 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1536 operand);
1537 type = TREE_TYPE (operand);
1538 }
1539 }
1540
1541 /* If we want to refer to an unconstrained array, use the appropriate
1542 expression. But this will never survive down to the back-end. */
1543 if (TYPE_IS_FAT_POINTER_P (type))
1544 {
1545 result = build1 (UNCONSTRAINED_ARRAY_REF,
1546 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1547 TREE_READONLY (result)
1548 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1549 }
1550
1551 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1552 else if (TREE_CODE (operand) == ADDR_EXPR)
1553 result = TREE_OPERAND (operand, 0);
1554
1555 /* Otherwise, build and fold the indirect reference. */
1556 else
1557 {
1558 result = build_fold_indirect_ref (operand);
1559 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1560 }
1561
1562 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1563 {
1564 TREE_SIDE_EFFECTS (result) = 1;
1565 if (TREE_CODE (result) == INDIRECT_REF)
1566 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1567 }
1568
1569 if ((TREE_CODE (result) == INDIRECT_REF
1570 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1571 && can_never_be_null)
1572 TREE_THIS_NOTRAP (result) = 1;
1573
1574 break;
1575 }
1576
1577 case NEGATE_EXPR:
1578 case BIT_NOT_EXPR:
1579 {
1580 tree modulus = ((operation_type
1581 && TREE_CODE (operation_type) == INTEGER_TYPE
1582 && TYPE_MODULAR_P (operation_type))
1583 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1584 int mod_pow2 = modulus && integer_pow2p (modulus);
1585
1586 /* If this is a modular type, there are various possibilities
1587 depending on the operation and whether the modulus is a
1588 power of two or not. */
1589
1590 if (modulus)
1591 {
1592 gcc_assert (operation_type == base_type);
1593 operand = convert (operation_type, operand);
1594
1595 /* For a binary modulus, the straightforward code is fastest in
1596 the negate case; the TRUNC_MOD_EXPR below amounts to an AND
1597 operation. */
1598 if (op_code == NEGATE_EXPR && mod_pow2)
1599 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1600 fold_build1 (NEGATE_EXPR, operation_type,
1601 operand),
1602 modulus);
1603
1604 /* For the nonbinary negate case, return zero for a zero operand;
1605 otherwise return the modulus minus the operand. If the modulus
1606 is a power of two minus one, we can do the subtraction
1607 as an XOR since it is equivalent and faster on most machines. */
1608 else if (op_code == NEGATE_EXPR && !mod_pow2)
1609 {
1610 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1611 modulus,
1612 convert (operation_type,
1613 integer_one_node))))
1614 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1615 operand, modulus);
1616 else
1617 result = fold_build2 (MINUS_EXPR, operation_type,
1618 modulus, operand);
1619
1620 result = fold_build3 (COND_EXPR, operation_type,
1621 fold_build2 (NE_EXPR,
1622 boolean_type_node,
1623 operand,
1624 convert
1625 (operation_type,
1626 integer_zero_node)),
1627 result, operand);
1628 }
1629 else
1630 {
1631 /* For the NOT cases, we need a constant equal to
1632 the modulus minus one. For a binary modulus, we
1633 XOR the operand against that constant; for a nonbinary
1634 modulus, we subtract the operand from it. */
1635
1636 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1637 convert (operation_type,
1638 integer_one_node));
1639
1640 if (mod_pow2)
1641 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1642 operand, cnst);
1643 else
1644 result = fold_build2 (MINUS_EXPR, operation_type,
1645 cnst, operand);
1646 }
1647
1648 break;
1649 }
1650 }
1651
1652 /* ... fall through ... */
1653
1654 default:
1655 gcc_assert (operation_type == base_type);
1656 result = fold_build1 (op_code, operation_type,
1657 convert (operation_type, operand));
1658 }
1659
1660 if (result_type && TREE_TYPE (result) != result_type)
1661 result = convert (result_type, result);
1662
1663 return result;
1664 }
1665 \f
1666 /* Similar, but for COND_EXPR. */
1667
1668 tree
1669 build_cond_expr (tree result_type, tree condition_operand,
1670 tree true_operand, tree false_operand)
1671 {
1672 bool addr_p = false;
1673 tree result;
1674
1675 /* The front-end verified that the result, true and false operands have
1676 the same base type. Convert everything to the result type. */
1677 true_operand = convert (result_type, true_operand);
1678 false_operand = convert (result_type, false_operand);
1679
1680 /* If the result type is unconstrained, take the address of the operands and
1681 then dereference the result. Likewise if the result type is passed by
1682 reference, because creating a temporary of this type is not allowed. */
1683 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1684 || TYPE_IS_BY_REFERENCE_P (result_type)
1685 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1686 {
1687 result_type = build_pointer_type (result_type);
1688 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1689 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1690 addr_p = true;
1691 }
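/* In other words, for a by-reference result type T, the conditional is
built as *(COND ? &TRUE_OP : &FALSE_OP) instead of materializing a
temporary of type T in each arm. */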
1692
1693 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1694 true_operand, false_operand);
1695
1696 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1697 in both arms, make sure it gets evaluated by moving it ahead of the
1698 conditional expression. This is necessary because it is evaluated
1699 in only one place at run time and would otherwise be uninitialized
1700 in one of the arms. */
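/* For instance, if both arms reduce to SAVE_EXPR <S> plus some simple
arithmetic, the result becomes COMPOUND_EXPR <SAVE_EXPR <S>,
COND_EXPR <...>> so that S is evaluated unconditionally before the
branch. */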
1701 true_operand = skip_simple_arithmetic (true_operand);
1702 false_operand = skip_simple_arithmetic (false_operand);
1703
1704 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1705 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1706
1707 if (addr_p)
1708 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1709
1710 return result;
1711 }
1712
1713 /* Similar, but for COMPOUND_EXPR. */
1714
1715 tree
1716 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1717 {
1718 bool addr_p = false;
1719 tree result;
1720
1721 /* If the result type is unconstrained, take the address of the operand and
1722 then dereference the result. Likewise if the result type is passed by
1723 reference, but this is natively handled in the gimplifier. */
1724 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1725 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1726 {
1727 result_type = build_pointer_type (result_type);
1728 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1729 addr_p = true;
1730 }
1731
1732 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1733 expr_operand);
1734
1735 if (addr_p)
1736 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1737
1738 return result;
1739 }
1740 \f
1741 /* Conveniently construct a function call expression. FNDECL names the
1742 function to be called, N is the number of arguments, and the "..."
1743 parameters are the argument expressions. Unlike build_call_expr
1744 this doesn't fold the call, hence it will always return a CALL_EXPR. */
1745
1746 tree
1747 build_call_n_expr (tree fndecl, int n, ...)
1748 {
1749 va_list ap;
1750 tree fntype = TREE_TYPE (fndecl);
1751 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1752
1753 va_start (ap, n);
1754 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1755 va_end (ap);
1756 return fn;
1757 }
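/* For instance, build_call_n_expr (free_decl, 1, free_ptr), as used by
maybe_wrap_free below, yields a CALL_EXPR invoking free on a single
pointer argument. */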
1758 \f
1759 /* Call a function that raises an exception and pass the line number and file
1760 name, if requested. MSG says which exception function to call.
1761
1762 GNAT_NODE is the gnat node conveying the source location for which the
1763 error should be signaled, or Empty in which case the error is signaled on
1764 the current ref_file_name/input_line.
1765
1766 KIND says which kind of exception this is for
1767 (N_Raise_{Constraint,Storage,Program}_Error). */
1768
1769 tree
1770 build_call_raise (int msg, Node_Id gnat_node, char kind)
1771 {
1772 tree fndecl = gnat_raise_decls[msg];
1773 tree label = get_exception_label (kind);
1774 tree filename;
1775 int line_number;
1776 const char *str;
1777 int len;
1778
1779 /* If this is to be done as a goto, handle that case. */
1780 if (label)
1781 {
1782 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1783 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1784
1785 /* If Local_Raise is present, generate
1786 Local_Raise (exception'Identity); */
1787 if (Present (local_raise))
1788 {
1789 tree gnu_local_raise
1790 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1791 tree gnu_exception_entity
1792 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1793 tree gnu_call
1794 = build_call_n_expr (gnu_local_raise, 1,
1795 build_unary_op (ADDR_EXPR, NULL_TREE,
1796 gnu_exception_entity));
1797
1798 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1799 gnu_call, gnu_result);
1800 }

1801 return gnu_result;
1802 }
1803
1804 str
1805 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1806 ? ""
1807 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1808 ? IDENTIFIER_POINTER
1809 (get_identifier (Get_Name_String
1810 (Debug_Source_Name
1811 (Get_Source_File_Index (Sloc (gnat_node))))))
1812 : ref_filename;
1813
1814 len = strlen (str);
1815 filename = build_string (len, str);
1816 line_number
1817 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1818 ? Get_Logical_Line_Number (Sloc (gnat_node))
1819 : LOCATION_LINE (input_location);
1820
1821 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1822 build_index_type (size_int (len)));
1823
1824 return
1825 build_call_n_expr (fndecl, 2,
1826 build1 (ADDR_EXPR,
1827 build_pointer_type (unsigned_char_type_node),
1828 filename),
1829 build_int_cst (NULL_TREE, line_number));
1830 }
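/* Barring suppressed exception locations, the call built above thus has
the form FNDECL ("some_file.adb", 42), i.e. a pointer to the filename
as an array of unsigned chars followed by the line number. */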
1831
1832 /* Similar to build_call_raise, for an index or range check exception as
1833 determined by MSG, with extra information of the form
1834 "INDEX out of range FIRST..LAST". */
1835
1836 tree
1837 build_call_raise_range (int msg, Node_Id gnat_node,
1838 tree index, tree first, tree last)
1839 {
1840 tree fndecl = gnat_raise_decls_ext[msg];
1841 tree filename;
1842 int line_number, column_number;
1843 const char *str;
1844 int len;
1845
1846 str
1847 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1848 ? ""
1849 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1850 ? IDENTIFIER_POINTER
1851 (get_identifier (Get_Name_String
1852 (Debug_Source_Name
1853 (Get_Source_File_Index (Sloc (gnat_node))))))
1854 : ref_filename;
1855
1856 len = strlen (str);
1857 filename = build_string (len, str);
1858 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1859 {
1860 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1861 column_number = Get_Column_Number (Sloc (gnat_node));
1862 }
1863 else
1864 {
1865 line_number = LOCATION_LINE (input_location);
1866 column_number = 0;
1867 }
1868
1869 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1870 build_index_type (size_int (len)));
1871
1872 return
1873 build_call_n_expr (fndecl, 6,
1874 build1 (ADDR_EXPR,
1875 build_pointer_type (unsigned_char_type_node),
1876 filename),
1877 build_int_cst (NULL_TREE, line_number),
1878 build_int_cst (NULL_TREE, column_number),
1879 convert (integer_type_node, index),
1880 convert (integer_type_node, first),
1881 convert (integer_type_node, last));
1882 }
1883
1884 /* Similar to build_call_raise, with extra information about the column
1885 where the check failed. */
1886
1887 tree
1888 build_call_raise_column (int msg, Node_Id gnat_node)
1889 {
1890 tree fndecl = gnat_raise_decls_ext[msg];
1891 tree filename;
1892 int line_number, column_number;
1893 const char *str;
1894 int len;
1895
1896 str
1897 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1898 ? ""
1899 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1900 ? IDENTIFIER_POINTER
1901 (get_identifier (Get_Name_String
1902 (Debug_Source_Name
1903 (Get_Source_File_Index (Sloc (gnat_node))))))
1904 : ref_filename;
1905
1906 len = strlen (str);
1907 filename = build_string (len, str);
1908 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1909 {
1910 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1911 column_number = Get_Column_Number (Sloc (gnat_node));
1912 }
1913 else
1914 {
1915 line_number = LOCATION_LINE (input_location);
1916 column_number = 0;
1917 }
1918
1919 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1920 build_index_type (size_int (len)));
1921
1922 return
1923 build_call_n_expr (fndecl, 3,
1924 build1 (ADDR_EXPR,
1925 build_pointer_type (unsigned_char_type_node),
1926 filename),
1927 build_int_cst (NULL_TREE, line_number),
1928 build_int_cst (NULL_TREE, column_number));
1929 }
1930 \f
1931 /* qsort comparator for the bit positions of two constructor elements
1932 for record components. */
1933
1934 static int
1935 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1936 {
1937 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1938 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1939 const_tree const field1 = elmt1->index;
1940 const_tree const field2 = elmt2->index;
1941 const int ret
1942 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1943
1944 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1945 }
1946
1947 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1948
1949 tree
1950 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1951 {
1952 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1953 bool read_only = true;
1954 bool side_effects = false;
1955 tree result, obj, val;
1956 unsigned int n_elmts;
1957
1958 /* Scan the elements to see if they are all constant or if any has side
1959 effects, to let us set global flags on the resulting constructor. Count
1960 the elements along the way for possible sorting purposes below. */
1961 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1962 {
1963 /* The predicate must be in keeping with output_constructor. */
1964 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1965 || (TREE_CODE (type) == RECORD_TYPE
1966 && CONSTRUCTOR_BITFIELD_P (obj)
1967 && !initializer_constant_valid_for_bitfield_p (val))
1968 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1969 allconstant = false;
1970
1971 if (!TREE_READONLY (val))
1972 read_only = false;
1973
1974 if (TREE_SIDE_EFFECTS (val))
1975 side_effects = true;
1976 }
1977
1978 /* For record types with constant components only, sort field list
1979 by increasing bit position. This is necessary to ensure the
1980 constructor can be output as static data. */
1981 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1982 v->qsort (compare_elmt_bitpos);
1983
1984 result = build_constructor (type, v);
1985 CONSTRUCTOR_NO_CLEARING (result) = 1;
1986 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1987 TREE_SIDE_EFFECTS (result) = side_effects;
1988 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1989 return result;
1990 }
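/* A typical use, as in build_allocator below, is:

vec<constructor_elt, va_gc> *v;
vec_alloc (v, 2);
CONSTRUCTOR_APPEND_ELT (v, field1, value1);
CONSTRUCTOR_APPEND_ELT (v, field2, value2);
t = gnat_build_constructor (type, v);

where field1 and field2 stand for FIELD_DECLs of TYPE; the result is
a CONSTRUCTOR with the global flags set as computed above. */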
1991 \f
1992 /* Return a COMPONENT_REF to access a field of RECORD_VARIABLE, given
1993 either by COMPONENT, an IDENTIFIER_NODE holding the name of the field,
1994 or by FIELD, a FIELD_DECL for it. Don't fold the result if NO_FOLD_P is true.
1995
1996 We also handle the fact that we might have been passed a pointer to the
1997 actual record and know how to look for fields in variant parts. */
1998
1999 static tree
2000 build_simple_component_ref (tree record_variable, tree component, tree field,
2001 bool no_fold_p)
2002 {
2003 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
2004 tree base, ref;
2005
2006 gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
2007 && COMPLETE_TYPE_P (record_type)
2008 && (component == NULL_TREE) != (field == NULL_TREE));
2009
2010 /* If no field was specified, look for a field with the specified name in
2011 the current record only. */
2012 if (!field)
2013 for (field = TYPE_FIELDS (record_type);
2014 field;
2015 field = DECL_CHAIN (field))
2016 if (DECL_NAME (field) == component)
2017 break;
2018
2019 if (!field)
2020 return NULL_TREE;
2021
2022 /* If this field is not in the specified record, see if we can find a field
2023 in the specified record whose original field is the same as this one. */
2024 if (DECL_CONTEXT (field) != record_type)
2025 {
2026 tree new_field;
2027
2028 /* First loop through normal components. */
2029 for (new_field = TYPE_FIELDS (record_type);
2030 new_field;
2031 new_field = DECL_CHAIN (new_field))
2032 if (SAME_FIELD_P (field, new_field))
2033 break;
2034
2035 /* Next, see if we're looking for an inherited component in an extension.
2036 If so, look through the extension directly, unless the type contains
2037 a placeholder, as it might be needed for a later substitution. */
2038 if (!new_field
2039 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
2040 && TYPE_ALIGN_OK (record_type)
2041 && !type_contains_placeholder_p (record_type)
2042 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
2043 == RECORD_TYPE
2044 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
2045 {
2046 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
2047 NULL_TREE, field, no_fold_p);
2048 if (ref)
2049 return ref;
2050 }
2051
2052 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2053 component in the first search. Doing this search in two steps is
2054 required to avoid hidden homonymous fields in the _Parent field. */
2055 if (!new_field)
2056 for (new_field = TYPE_FIELDS (record_type);
2057 new_field;
2058 new_field = DECL_CHAIN (new_field))
2059 if (DECL_INTERNAL_P (new_field))
2060 {
2061 tree field_ref
2062 = build_simple_component_ref (record_variable,
2063 NULL_TREE, new_field, no_fold_p);
2064 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
2065 no_fold_p);
2066 if (ref)
2067 return ref;
2068 }
2069
2070 field = new_field;
2071 }
2072
2073 if (!field)
2074 return NULL_TREE;
2075
2076 /* If the field's offset has overflowed, do not try to access it, as doing
2077 so may trigger sanity checks deeper in the back-end. Note that we don't
2078 need to warn since this will be done on trying to declare the object. */
2079 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2080 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2081 return NULL_TREE;
2082
2083 /* We have found a suitable field. Before building the COMPONENT_REF, get
2084 the base object of the record variable if possible. */
2085 base = record_variable;
2086
2087 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR)
2088 {
2089 tree inner_variable = TREE_OPERAND (record_variable, 0);
2090 tree inner_type = TYPE_MAIN_VARIANT (TREE_TYPE (inner_variable));
2091
2092 /* Look through a conversion between type variants. This is transparent
2093 as far as the field is concerned. */
2094 if (inner_type == record_type)
2095 base = inner_variable;
2096
2097 /* Look through a conversion between original and packable version, but
2098 the field needs to be adjusted in this case. */
2099 else if (RECORD_OR_UNION_TYPE_P (inner_type)
2100 && TYPE_NAME (inner_type) == TYPE_NAME (record_type))
2101 {
2102 tree new_field;
2103
2104 for (new_field = TYPE_FIELDS (inner_type);
2105 new_field;
2106 new_field = DECL_CHAIN (new_field))
2107 if (SAME_FIELD_P (field, new_field))
2108 break;
2109 if (new_field)
2110 {
2111 field = new_field;
2112 base = inner_variable;
2113 }
2114 }
2115 }
2116
2117 ref = build3 (COMPONENT_REF, TREE_TYPE (field), base, field, NULL_TREE);
2118
2119 if (TREE_READONLY (record_variable)
2120 || TREE_READONLY (field)
2121 || TYPE_READONLY (record_type))
2122 TREE_READONLY (ref) = 1;
2123
2124 if (TREE_THIS_VOLATILE (record_variable)
2125 || TREE_THIS_VOLATILE (field)
2126 || TYPE_VOLATILE (record_type))
2127 TREE_THIS_VOLATILE (ref) = 1;
2128
2129 if (no_fold_p)
2130 return ref;
2131
2132 /* The generic folder may punt in this case because the inner array type
2133 can be self-referential, but folding is in fact not problematic. */
2134 if (TREE_CODE (base) == CONSTRUCTOR
2135 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (base)))
2136 {
2137 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (base);
2138 unsigned HOST_WIDE_INT idx;
2139 tree index, value;
2140 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2141 if (index == field)
2142 return value;
2143 return ref;
2144 }
2145
2146 return fold (ref);
2147 }
2148 \f
2149 /* Likewise, but generate a Constraint_Error if the reference could not be
2150 found. */
2151
2152 tree
2153 build_component_ref (tree record_variable, tree component, tree field,
2154 bool no_fold_p)
2155 {
2156 tree ref = build_simple_component_ref (record_variable, component, field,
2157 no_fold_p);
2158 if (ref)
2159 return ref;
2160
2161 /* If FIELD was specified, assume this is an invalid user field so raise
2162 Constraint_Error. Otherwise, we have no type to return so abort. */
2163 gcc_assert (field);
2164 return build1 (NULL_EXPR, TREE_TYPE (field),
2165 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2166 N_Raise_Constraint_Error));
2167 }
2168 \f
2169 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2170 identically. Process the case where a GNAT_PROC to call is provided. */
2171
2172 static inline tree
2173 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2174 Entity_Id gnat_proc, Entity_Id gnat_pool)
2175 {
2176 tree gnu_proc = gnat_to_gnu (gnat_proc);
2177 tree gnu_call;
2178
2179 /* A storage pool's underlying type is a record type (for both predefined
2180 storage pools and GNAT simple storage pools). The secondary stack uses
2181 the same mechanism, but its pool object (SS_Pool) is an integer. */
2182 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2183 {
2184 /* The size is the third parameter; the alignment parameter
2185 has the same type. */
2186 Entity_Id gnat_size_type
2187 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2188 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2189
2190 tree gnu_pool = gnat_to_gnu (gnat_pool);
2191 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2192 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2193
2194 gnu_size = convert (gnu_size_type, gnu_size);
2195 gnu_align = convert (gnu_size_type, gnu_align);
2196
2197 /* The first arg is always the address of the storage pool; next
2198 comes the address of the object, for a deallocator, then the
2199 size and alignment. */
2200 if (gnu_obj)
2201 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2202 gnu_size, gnu_align);
2203 else
2204 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2205 gnu_size, gnu_align);
2206 }
2207
2208 /* Secondary stack case. */
2209 else
2210 {
2211 /* The size is the second parameter. */
2212 Entity_Id gnat_size_type
2213 = Etype (Next_Formal (First_Formal (gnat_proc)));
2214 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2215
2216 gnu_size = convert (gnu_size_type, gnu_size);
2217
2218 /* The first arg is the address of the object, for a deallocator,
2219 then the size. */
2220 if (gnu_obj)
2221 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2222 else
2223 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2224 }
2225
2226 return gnu_call;
2227 }
2228
2229 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2230 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2231 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2232 latter offers. */
2233
2234 static inline tree
2235 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2236 {
2237 /* When the DATA_TYPE alignment is stricter than what malloc offers
2238 (super-aligned case), we allocate an "aligning" wrapper type and return
2239 the address of its single data field with the malloc's return value
2240 stored just in front. */
2241
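/* Illustrative layout for the super-aligned case, assuming by way of
example 8-byte pointers and a 64-byte DATA_TYPE alignment:

[ padding ... ][ saved malloc value ][ data ... ]

where the address of the data part, 64-byte aligned here, is what the
caller receives; the saved malloc value occupies the POINTER_SIZE
slot just in front of it, where maybe_wrap_free expects it. */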
2242 unsigned int data_align = TYPE_ALIGN (data_type);
2243 unsigned int system_allocator_alignment
2244 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2245
2246 tree aligning_type
2247 = ((data_align > system_allocator_alignment)
2248 ? make_aligning_type (data_type, data_align, data_size,
2249 system_allocator_alignment,
2250 POINTER_SIZE / BITS_PER_UNIT,
2251 gnat_node)
2252 : NULL_TREE);
2253
2254 tree size_to_malloc
2255 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2256
2257 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2258
2259 if (aligning_type)
2260 {
2261 /* Latch malloc's return value and get a pointer to the aligning field
2262 first. */
2263 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2264
2265 tree aligning_record_addr
2266 = convert (build_pointer_type (aligning_type), storage_ptr);
2267
2268 tree aligning_record
2269 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2270
2271 tree aligning_field
2272 = build_component_ref (aligning_record, NULL_TREE,
2273 TYPE_FIELDS (aligning_type), false);
2274
2275 tree aligning_field_addr
2276 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2277
2278 /* Then arrange to store the allocator's return value ahead
2279 and return. */
2280 tree storage_ptr_slot_addr
2281 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2282 convert (ptr_type_node, aligning_field_addr),
2283 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2284 / BITS_PER_UNIT));
2285
2286 tree storage_ptr_slot
2287 = build_unary_op (INDIRECT_REF, NULL_TREE,
2288 convert (build_pointer_type (ptr_type_node),
2289 storage_ptr_slot_addr));
2290
2291 return
2292 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2293 build_binary_op (INIT_EXPR, NULL_TREE,
2294 storage_ptr_slot, storage_ptr),
2295 aligning_field_addr);
2296 }
2297 else
2298 return malloc_ptr;
2299 }
2300
2301 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2302 designated by DATA_PTR using the __gnat_free entry point. */
2303
2304 static inline tree
2305 maybe_wrap_free (tree data_ptr, tree data_type)
2306 {
2307 /* In the regular alignment case, we pass the data pointer straight to free.
2308 In the superaligned case, we need to retrieve the initial allocator
2309 return value, stored in front of the data block at allocation time. */
2310
2311 unsigned int data_align = TYPE_ALIGN (data_type);
2312 unsigned int system_allocator_alignment
2313 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2314
2315 tree free_ptr;
2316
2317 if (data_align > system_allocator_alignment)
2318 {
2319 /* DATA_FRONT_PTR (void *)
2320 = (void *)DATA_PTR - sizeof (void *) */
2321 tree data_front_ptr
2322 = build_binary_op
2323 (POINTER_PLUS_EXPR, ptr_type_node,
2324 convert (ptr_type_node, data_ptr),
2325 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2326
2327 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2328 free_ptr
2329 = build_unary_op
2330 (INDIRECT_REF, NULL_TREE,
2331 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2332 }
2333 else
2334 free_ptr = data_ptr;
2335
2336 return build_call_n_expr (free_decl, 1, free_ptr);
2337 }
2338
2339 /* Build a GCC tree to call an allocation or deallocation function.
2340 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2341 generate an allocator.
2342
2343 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2344 object type, used to determine the to-be-honored address alignment.
2345 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2346 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2347 to provide an error location for restriction violation messages. */
2348
2349 tree
2350 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2351 Entity_Id gnat_proc, Entity_Id gnat_pool,
2352 Node_Id gnat_node)
2353 {
2354 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2355
2356 /* Explicit proc to call? This one is assumed to deal with the type
2357 alignment constraints. */
2358 if (Present (gnat_proc))
2359 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2360 gnat_proc, gnat_pool);
2361
2362 /* Otherwise, object to "free" or "malloc" with possible special processing
2363 for alignments stricter than what the default allocator honors. */
2364 else if (gnu_obj)
2365 return maybe_wrap_free (gnu_obj, gnu_type);
2366 else
2367 {
2368 /* Assert that we can no longer be called with this special pool. */
2369 gcc_assert (gnat_pool != -1);
2370
2371 /* Check that we aren't violating the associated restriction. */
2372 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2373 Check_No_Implicit_Heap_Alloc (gnat_node);
2374
2375 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2376 }
2377 }
2378 \f
2379 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2380 initial value is INIT, if INIT is nonzero. Convert the expression to
2381 RESULT_TYPE, which must be some pointer type, and return the result.
2382
2383 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2384 the storage pool to use. GNAT_NODE is used to provide an error
2385 location for restriction violation messages. If IGNORE_INIT_TYPE is
2386 true, ignore the type of INIT for the purpose of determining the size;
2387 this will cause the maximum size to be allocated if TYPE is of
2388 self-referential size. */
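/* For a fat or thin RESULT_TYPE, the storage allocated below is laid
out as [ bounds template ][ array data ]; the template is filled in
even when there is no initializing expression, and the storage
address is then converted to RESULT_TYPE. */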
2389
2390 tree
2391 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2392 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2393 {
2394 tree size, storage, storage_deref, storage_init;
2395
2396 /* If the initializer is present and is a NULL_EXPR, just return a new one. */
2397 if (init && TREE_CODE (init) == NULL_EXPR)
2398 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2399
2400 /* If the initializer is present and is a COND_EXPR, deal with each branch. */
2401 else if (init && TREE_CODE (init) == COND_EXPR)
2402 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2403 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2404 gnat_proc, gnat_pool, gnat_node,
2405 ignore_init_type),
2406 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2407 gnat_proc, gnat_pool, gnat_node,
2408 ignore_init_type));
2409
2410 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2411 sizes of the object and its template. Allocate the whole thing and
2412 fill in the parts that are known. */
2413 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2414 {
2415 tree storage_type
2416 = build_unc_object_type_from_ptr (result_type, type,
2417 get_identifier ("ALLOC"), false);
2418 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2419 tree storage_ptr_type = build_pointer_type (storage_type);
2420
2421 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2422 init);
2423
2424 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2425 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2426 size = size_int (-1);
2427
2428 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2429 gnat_proc, gnat_pool, gnat_node);
2430 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2431 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2432 TREE_THIS_NOTRAP (storage_deref) = 1;
2433
2434 /* If there is an initializing expression, then make a constructor for
2435 the entire object including the bounds and copy it into the object.
2436 If there is no initializing expression, just set the bounds. */
2437 if (init)
2438 {
2439 vec<constructor_elt, va_gc> *v;
2440 vec_alloc (v, 2);
2441
2442 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2443 build_template (template_type, type, init));
2444 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2445 init);
2446 storage_init
2447 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2448 gnat_build_constructor (storage_type, v));
2449 }
2450 else
2451 storage_init
2452 = build_binary_op (INIT_EXPR, NULL_TREE,
2453 build_component_ref (storage_deref, NULL_TREE,
2454 TYPE_FIELDS (storage_type),
2455 false),
2456 build_template (template_type, type, NULL_TREE));
2457
2458 return build2 (COMPOUND_EXPR, result_type,
2459 storage_init, convert (result_type, storage));
2460 }
2461
2462 size = TYPE_SIZE_UNIT (type);
2463
2464 /* If we have an initializing expression, see if its size is simpler
2465 than the size from the type. */
2466 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2467 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2468 || CONTAINS_PLACEHOLDER_P (size)))
2469 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2470
2471 /* If the size is still self-referential, reference the initializing
2472 expression, if it is present. If not, this must have been a
2473 call to allocate a library-level object, in which case we use
2474 the maximum size. */
2475 if (CONTAINS_PLACEHOLDER_P (size))
2476 {
2477 if (!ignore_init_type && init)
2478 size = substitute_placeholder_in_expr (size, init);
2479 else
2480 size = max_size (size, true);
2481 }
2482
2483 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2484 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2485 size = size_int (-1);
2486
2487 storage = convert (result_type,
2488 build_call_alloc_dealloc (NULL_TREE, size, type,
2489 gnat_proc, gnat_pool,
2490 gnat_node));
2491
2492 /* If we have an initial value, protect the new address, assign the value
2493 and return the address with a COMPOUND_EXPR. */
2494 if (init)
2495 {
2496 storage = gnat_protect_expr (storage);
2497 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2498 TREE_THIS_NOTRAP (storage_deref) = 1;
2499 storage_init
2500 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2501 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2502 }
2503
2504 return storage;
2505 }
2506 \f
2507 /* Indicate that we need to take the address of T and that it therefore
2508 should not be allocated in a register. Returns true if successful. */
2509
2510 bool
2511 gnat_mark_addressable (tree t)
2512 {
2513 while (true)
2514 switch (TREE_CODE (t))
2515 {
2516 case ADDR_EXPR:
2517 case COMPONENT_REF:
2518 case ARRAY_REF:
2519 case ARRAY_RANGE_REF:
2520 case REALPART_EXPR:
2521 case IMAGPART_EXPR:
2522 case VIEW_CONVERT_EXPR:
2523 case NON_LVALUE_EXPR:
2524 CASE_CONVERT:
2525 t = TREE_OPERAND (t, 0);
2526 break;
2527
2528 case COMPOUND_EXPR:
2529 t = TREE_OPERAND (t, 1);
2530 break;
2531
2532 case CONSTRUCTOR:
2533 TREE_ADDRESSABLE (t) = 1;
2534 return true;
2535
2536 case VAR_DECL:
2537 case PARM_DECL:
2538 case RESULT_DECL:
2539 TREE_ADDRESSABLE (t) = 1;
2540 return true;
2541
2542 case FUNCTION_DECL:
2543 TREE_ADDRESSABLE (t) = 1;
2544 return true;
2545
2546 case CONST_DECL:
2547 return DECL_CONST_CORRESPONDING_VAR (t)
2548 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2549
2550 default:
2551 return true;
2552 }
2553 }
2554 \f
2555 /* Return true if EXP is a stable expression for the purpose of the functions
2556 below and, therefore, can be returned unmodified by them. We accept things
2557 that are actual constants or that have already been handled. */
2558
2559 static bool
2560 gnat_stable_expr_p (tree exp)
2561 {
2562 enum tree_code code = TREE_CODE (exp);
2563 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2564 }
2565
2566 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2567 but we know how to handle our own nodes. */
2568
2569 tree
2570 gnat_save_expr (tree exp)
2571 {
2572 tree type = TREE_TYPE (exp);
2573 enum tree_code code = TREE_CODE (exp);
2574
2575 if (gnat_stable_expr_p (exp))
2576 return exp;
2577
2578 if (code == UNCONSTRAINED_ARRAY_REF)
2579 {
2580 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2581 TREE_READONLY (t) = TYPE_READONLY (type);
2582 return t;
2583 }
2584
2585 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2586 This may be more efficient, but will also allow us to more easily find
2587 the match for the PLACEHOLDER_EXPR. */
2588 if (code == COMPONENT_REF
2589 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2590 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2591 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2592
2593 return save_expr (exp);
2594 }
2595
2596 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2597 is optimized under the assumption that EXP's value doesn't change before
2598 its subsequent reuse(s) except through its potential reevaluation. */
2599
2600 tree
2601 gnat_protect_expr (tree exp)
2602 {
2603 tree type = TREE_TYPE (exp);
2604 enum tree_code code = TREE_CODE (exp);
2605
2606 if (gnat_stable_expr_p (exp))
2607 return exp;
2608
2609 /* If EXP has no side effects, we theoretically don't need to do anything.
2610 However, we may be recursively passed more and more complex expressions
2611 involving checks which will be reused multiple times and eventually be
2612 unshared for gimplification; in order to avoid a complexity explosion
2613 at that point, we protect any expressions more complex than a simple
2614 arithmetic expression. */
2615 if (!TREE_SIDE_EFFECTS (exp))
2616 {
2617 tree inner = skip_simple_arithmetic (exp);
2618 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2619 return exp;
2620 }
2621
2622 /* If this is a conversion, protect what's inside the conversion. */
2623 if (code == NON_LVALUE_EXPR
2624 || CONVERT_EXPR_CODE_P (code)
2625 || code == VIEW_CONVERT_EXPR)
2626 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2627
2628 /* If we're indirectly referencing something, we only need to protect the
2629 address since the data itself can't change in these situations. */
2630 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2631 {
2632 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2633 TREE_READONLY (t) = TYPE_READONLY (type);
2634 return t;
2635 }
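/* For instance, protecting INDIRECT_REF <P> yields
INDIRECT_REF <SAVE_EXPR <P>>: only the address is saved, per the
assumption above that the designated data cannot change. */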
2636
2637 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2638 This may be more efficient, but will also allow us to more easily find
2639 the match for the PLACEHOLDER_EXPR. */
2640 if (code == COMPONENT_REF
2641 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2642 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2643 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2644
2645 /* If this is a fat pointer or something that can be placed in a register,
2646 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2647 returned via invisible reference in most ABIs so the temporary will
2648 directly be filled by the callee. */
2649 if (TYPE_IS_FAT_POINTER_P (type)
2650 || TYPE_MODE (type) != BLKmode
2651 || code == CALL_EXPR)
2652 return save_expr (exp);
2653
2654 /* Otherwise, reference the object: protect the address and dereference. */
2655 return
2656 build_unary_op (INDIRECT_REF, type,
2657 save_expr (build_unary_op (ADDR_EXPR,
2658 build_reference_type (type),
2659 exp)));
2660 }
2661
2662 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2663 argument to force evaluation of everything. */
2664
2665 static tree
2666 gnat_stabilize_reference_1 (tree e, void *data, int n)
2667 {
2668 const bool force = *(bool *)data;
2669 enum tree_code code = TREE_CODE (e);
2670 tree type = TREE_TYPE (e);
2671 tree result;
2672
2673 if (gnat_stable_expr_p (e))
2674 return e;
2675
2676 switch (TREE_CODE_CLASS (code))
2677 {
2678 case tcc_exceptional:
2679 case tcc_declaration:
2680 case tcc_comparison:
2681 case tcc_expression:
2682 case tcc_reference:
2683 case tcc_vl_exp:
2684 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2685 fat pointer. This may be more efficient, but will also allow
2686 us to more easily find the match for the PLACEHOLDER_EXPR. */
2687 if (code == COMPONENT_REF
2688 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2689 result
2690 = build3 (code, type,
2691 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data, n),
2692 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2693 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2694 so that it will only be evaluated once. */
2695 /* The tcc_reference and tcc_comparison classes could be handled as
2696 below, but it is generally faster to only evaluate them once. */
2697 else if (TREE_SIDE_EFFECTS (e) || force)
2698 return save_expr (e);
2699 else
2700 return e;
2701 break;
2702
2703 case tcc_binary:
2704 /* Recursively stabilize each operand. */
2705 result
2706 = build2 (code, type,
2707 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data, n),
2708 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data, n));
2709 break;
2710
2711 case tcc_unary:
2712 /* Recursively stabilize each operand. */
2713 result
2714 = build1 (code, type,
2715 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data, n));
2716 break;
2717
2718 default:
2719 gcc_unreachable ();
2720 }
2721
2722 TREE_READONLY (result) = TREE_READONLY (e);
2723 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2724 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2725
2726 return result;
2727 }
2728
2729 /* This is equivalent to stabilize_reference in tree.c but we know how to
2730 handle our own nodes and we take extra arguments. FORCE says whether to
2731 force evaluation of everything. */
2732
2733 tree
2734 gnat_stabilize_reference (tree ref, bool force)
2735 {
2736 return gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force);
2737 }
2738
2739 /* Rewrite reference REF and call FUNC on each expression within REF in the
2740 process. DATA is passed unmodified to FUNC and N is bumped each time it
2741 is passed to FUNC, so FUNC is guaranteed to see a given N only once per
2742 reference to be rewritten. */
2743
2744 tree
2745 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, int n)
2746 {
2747 tree type = TREE_TYPE (ref);
2748 enum tree_code code = TREE_CODE (ref);
2749 tree result;
2750
2751 switch (code)
2752 {
2753 case CONST_DECL:
2754 case VAR_DECL:
2755 case PARM_DECL:
2756 case RESULT_DECL:
2757 /* No action is needed in this case. */
2758 return ref;
2759
2760 CASE_CONVERT:
2761 case FLOAT_EXPR:
2762 case FIX_TRUNC_EXPR:
2763 case VIEW_CONVERT_EXPR:
2764 result
2765 = build1 (code, type,
2766 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2767 n));
2768 break;
2769
2770 case INDIRECT_REF:
2771 case UNCONSTRAINED_ARRAY_REF:
2772 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data, n));
2773 break;
2774
2775 case COMPONENT_REF:
2776 result = build3 (COMPONENT_REF, type,
2777 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2778 data, n),
2779 TREE_OPERAND (ref, 1), NULL_TREE);
2780 break;
2781
2782 case BIT_FIELD_REF:
2783 result = build3 (BIT_FIELD_REF, type,
2784 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2785 data, n),
2786 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2787 break;
2788
2789 case ARRAY_REF:
2790 case ARRAY_RANGE_REF:
2791 result
2792 = build4 (code, type,
2793 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2794 n + 1),
2795 func (TREE_OPERAND (ref, 1), data, n),
2796 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
2797 break;
2798
2799 case CALL_EXPR:
2800 {
2801 /* This can only be an atomic load. */
2802 gcc_assert (call_is_atomic_load (ref));
2803
2804 /* An atomic load is an INDIRECT_REF of its first argument. */
2805 tree t = CALL_EXPR_ARG (ref, 0);
2806 if (TREE_CODE (t) == NOP_EXPR)
2807 t = TREE_OPERAND (t, 0);
2808 if (TREE_CODE (t) == ADDR_EXPR)
2809 t = build1 (ADDR_EXPR, TREE_TYPE (t),
2810 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2811 n));
2812 else
2813 t = func (t, data, n);
2814 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2815
2816 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2817 t, CALL_EXPR_ARG (ref, 1));
2818 }
2819 break;
2820
2821 case ERROR_MARK:
2822 return error_mark_node;
2823
2824 default:
2825 gcc_unreachable ();
2826 }
2827
2828 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2829 may not be sustained across some paths, such as the way via build1 for
2830 INDIRECT_REF. We reset those flags here in the general case, which is
2831 consistent with the GCC version of this routine.
2832
2833 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2834 paths introduce side-effects where there were none initially (e.g. if a
2835 SAVE_EXPR is built) and we also want to keep track of that. */
2836 TREE_READONLY (result) = TREE_READONLY (ref);
2837 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2838 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2839
2840 if (code == INDIRECT_REF
2841 || code == UNCONSTRAINED_ARRAY_REF
2842 || code == ARRAY_REF
2843 || code == ARRAY_RANGE_REF)
2844 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2845
2846 return result;
2847 }
2848
2849 /* This is equivalent to get_inner_reference in expr.c but it returns the
2850 ultimate containing object only if the reference (lvalue) is constant,
2851 i.e. if it doesn't depend on the context in which it is evaluated. */
2852
2853 tree
2854 get_inner_constant_reference (tree exp)
2855 {
2856 while (true)
2857 {
2858 switch (TREE_CODE (exp))
2859 {
2860 case BIT_FIELD_REF:
2861 break;
2862
2863 case COMPONENT_REF:
2864 if (TREE_OPERAND (exp, 2) != NULL_TREE)
2865 return NULL_TREE;
2866
2867 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2868 return NULL_TREE;
2869 break;
2870
2871 case ARRAY_REF:
2872 case ARRAY_RANGE_REF:
2873 {
2874 if (TREE_OPERAND (exp, 2) != NULL_TREE
2875 || TREE_OPERAND (exp, 3) != NULL_TREE)
2876 return NULL_TREE;
2877
2878 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2879 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2880 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2881 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2882 return NULL_TREE;
2883 }
2884 break;
2885
2886 case REALPART_EXPR:
2887 case IMAGPART_EXPR:
2888 case VIEW_CONVERT_EXPR:
2889 break;
2890
2891 default:
2892 goto done;
2893 }
2894
2895 exp = TREE_OPERAND (exp, 0);
2896 }
2897
2898 done:
2899 return exp;
2900 }
2901
2902 /* If EXPR is an expression that is invariant in the current function, in the
2903 sense that it can be evaluated anywhere in the function and any number of
2904 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2905
2906 tree
2907 gnat_invariant_expr (tree expr)
2908 {
2909 tree type = TREE_TYPE (expr), t;
2910
2911 expr = remove_conversions (expr, false);
2912
2913 while ((TREE_CODE (expr) == CONST_DECL
2914 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2915 && decl_function_context (expr) == current_function_decl
2916 && DECL_INITIAL (expr))
2917 {
2918 expr = DECL_INITIAL (expr);
2919 /* Look into CONSTRUCTORs built to initialize padded types. */
2920 if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2921 expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2922 expr = remove_conversions (expr, false);
2923 }
2924
2925 /* We are only interested in scalar types at the moment and, even if we may
2926 have gone through padding types in the above loop, we must be back to a
2927 scalar value at this point. */
2928 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2929 return NULL_TREE;
2930
2931 if (TREE_CONSTANT (expr))
2932 return fold_convert (type, expr);
2933
2934 t = expr;
2935
2936 while (true)
2937 {
2938 switch (TREE_CODE (t))
2939 {
2940 case COMPONENT_REF:
2941 if (TREE_OPERAND (t, 2) != NULL_TREE)
2942 return NULL_TREE;
2943 break;
2944
2945 case ARRAY_REF:
2946 case ARRAY_RANGE_REF:
2947 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2948 || TREE_OPERAND (t, 2) != NULL_TREE
2949 || TREE_OPERAND (t, 3) != NULL_TREE)
2950 return NULL_TREE;
2951 break;
2952
2953 case BIT_FIELD_REF:
2954 case VIEW_CONVERT_EXPR:
2955 case REALPART_EXPR:
2956 case IMAGPART_EXPR:
2957 break;
2958
2959 case INDIRECT_REF:
2960 if (!TREE_READONLY (t)
2961 || TREE_SIDE_EFFECTS (t)
2962 || !TREE_THIS_NOTRAP (t))
2963 return NULL_TREE;
2964 break;
2965
2966 default:
2967 goto object;
2968 }
2969
2970 t = TREE_OPERAND (t, 0);
2971 }
2972
2973 object:
2974 if (TREE_SIDE_EFFECTS (t))
2975 return NULL_TREE;
2976
2977 if (TREE_CODE (t) == CONST_DECL
2978 && (DECL_EXTERNAL (t)
2979 || decl_function_context (t) != current_function_decl))
2980 return fold_convert (type, expr);
2981
2982 if (!TREE_READONLY (t))
2983 return NULL_TREE;
2984
2985 if (TREE_CODE (t) == PARM_DECL)
2986 return fold_convert (type, expr);
2987
2988 if (TREE_CODE (t) == VAR_DECL
2989 && (DECL_EXTERNAL (t)
2990 || decl_function_context (t) != current_function_decl))
2991 return fold_convert (type, expr);
2992
2993 return NULL_TREE;
2994 }