1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2016, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "vec.h"
31 #include "alias.h"
32 #include "tree.h"
33 #include "inchash.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stringpool.h"
37 #include "varasm.h"
38 #include "flags.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "tree-inline.h"
42
43 #include "ada.h"
44 #include "types.h"
45 #include "atree.h"
46 #include "elists.h"
47 #include "namet.h"
48 #include "nlists.h"
49 #include "snames.h"
50 #include "stringt.h"
51 #include "uintp.h"
52 #include "fe.h"
53 #include "sinfo.h"
54 #include "einfo.h"
55 #include "ada-tree.h"
56 #include "gigi.h"
57
58 /* Return the base type of TYPE. */
59
60 tree
61 get_base_type (tree type)
62 {
63 if (TREE_CODE (type) == RECORD_TYPE
64 && TYPE_JUSTIFIED_MODULAR_P (type))
65 type = TREE_TYPE (TYPE_FIELDS (type));
66
67 while (TREE_TYPE (type)
68 && (TREE_CODE (type) == INTEGER_TYPE
69 || TREE_CODE (type) == REAL_TYPE))
70 type = TREE_TYPE (type);
71
72 return type;
73 }
74 \f
75 /* EXP is a GCC tree representing an address. See if we can find how strictly
76 the object at this address is aligned and, if so, return the alignment of
77 the object in bits. Otherwise return 0. */
78
79 unsigned int
80 known_alignment (tree exp)
81 {
82 unsigned int this_alignment;
83 unsigned int lhs, rhs;
84
85 switch (TREE_CODE (exp))
86 {
87 CASE_CONVERT:
88 case VIEW_CONVERT_EXPR:
89 case NON_LVALUE_EXPR:
90 /* Conversions between pointers and integers don't change the alignment
91 of the underlying object. */
92 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
93 break;
94
95 case COMPOUND_EXPR:
96 /* The value of a COMPOUND_EXPR is that of its second operand. */
97 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
98 break;
99
100 case PLUS_EXPR:
101 case MINUS_EXPR:
102 /* If two addresses are added, the alignment of the result is the
103 minimum of the two alignments. */
104 lhs = known_alignment (TREE_OPERAND (exp, 0));
105 rhs = known_alignment (TREE_OPERAND (exp, 1));
106 this_alignment = MIN (lhs, rhs);
107 break;
108
109 case POINTER_PLUS_EXPR:
110 /* If this is the pattern built for aligning types, decode it. */
111 if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
112 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
113 {
114 tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
115 return
116 known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
117 }
118
119 /* If we don't know the alignment of the offset, we assume that
120 of the base. */
121 lhs = known_alignment (TREE_OPERAND (exp, 0));
122 rhs = known_alignment (TREE_OPERAND (exp, 1));
123
124 if (rhs == 0)
125 this_alignment = lhs;
126 else
127 this_alignment = MIN (lhs, rhs);
128 break;
129
130 case COND_EXPR:
131 /* If there is a choice between two values, use the smaller one. */
132 lhs = known_alignment (TREE_OPERAND (exp, 1));
133 rhs = known_alignment (TREE_OPERAND (exp, 2));
134 this_alignment = MIN (lhs, rhs);
135 break;
136
137 case INTEGER_CST:
138 {
139 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
140 /* C & -C isolates the lowest set bit of the constant, which gives
141 its alignment in bytes, so convert it to bits. */
142 this_alignment = (c & -c) * BITS_PER_UNIT;
143 }
144 break;
145
146 case MULT_EXPR:
147 /* If we know the alignment of just one side, use it. Otherwise,
148 use the product of the alignments. */
149 lhs = known_alignment (TREE_OPERAND (exp, 0));
150 rhs = known_alignment (TREE_OPERAND (exp, 1));
151
152 if (lhs == 0)
153 this_alignment = rhs;
154 else if (rhs == 0)
155 this_alignment = lhs;
156 else
157 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
158 break;
159
160 case BIT_AND_EXPR:
161 /* A bit-and expression is as aligned as the maximum alignment of the
162 operands. We typically get here for a complex lhs and a constant
163 negative power of two on the rhs to force an explicit alignment, so
164 don't bother looking at the lhs. */
165 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
166 break;
167
168 case ADDR_EXPR:
169 this_alignment = expr_align (TREE_OPERAND (exp, 0));
170 break;
171
172 case CALL_EXPR:
173 {
174 tree fndecl = get_callee_fndecl (exp);
175 if (fndecl == malloc_decl || fndecl == realloc_decl)
176 return get_target_system_allocator_alignment () * BITS_PER_UNIT;
177
178 tree t = maybe_inline_call_in_expr (exp);
179 if (t)
180 return known_alignment (t);
181 }
182
183 /* ... fall through ... */
184
185 default:
186 /* For other pointer expressions, we assume that the pointed-to object
187 is at least as aligned as the pointed-to type. Beware that we can
188 have a dummy type here (e.g. a Taft Amendment type), for which the
189 alignment is meaningless and should be ignored. */
190 if (POINTER_TYPE_P (TREE_TYPE (exp))
191 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
192 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
193 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
194 else
195 this_alignment = 0;
196 break;
197 }
198
199 return this_alignment;
200 }
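
/* Illustrative sketch (assumed values, not part of the original sources):
   for a constant byte offset C, C & -C isolates its lowest set bit, so
   C = 24 = 0b11000 makes the INTEGER_CST case return 8 * BITS_PER_UNIT,
   i.e. 64-bit alignment when BITS_PER_UNIT is 8.  Combined with a base
   pointer known to be 16-byte aligned, the POINTER_PLUS_EXPR case then
   returns MIN (128, 64) = 64 bits for the whole address.  */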
201 \f
202 /* We have a comparison or assignment operation on two types, T1 and T2, which
203 are either both array types or both record types. T1 is assumed to be for
204 the left hand side operand, and T2 for the right hand side. Return the
205 type that both operands should be converted to for the operation, if any.
206 Otherwise return zero. */
207
208 static tree
209 find_common_type (tree t1, tree t2)
210 {
211 /* ??? As of today, various constructs lead to here with types of different
212 sizes even when both constants (e.g. tagged types, packable vs regular
213 component types, padded vs unpadded types, ...). While some of these
214 would better be handled upstream (types should be made consistent before
215 calling into build_binary_op), some others are really expected and we
216 have to be careful. */
217
218 /* We must avoid writing more than what the target can hold if this is for
219 an assignment (the case of tagged types is handled in build_binary_op),
220 so we use the lhs type if it is known to be smaller or of constant size
221 and the rhs type is not, whatever the modes. We also force t1 in case of
222 constant size equality, to minimize occurrences of view conversions on the
223 lhs of an assignment, except for record types with a variant part on the
224 lhs but not on the rhs, in order to make the conversion simpler. */
225 if (TREE_CONSTANT (TYPE_SIZE (t1))
226 && (!TREE_CONSTANT (TYPE_SIZE (t2))
227 || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
228 || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
229 && !(TREE_CODE (t1) == RECORD_TYPE
230 && TREE_CODE (t2) == RECORD_TYPE
231 && get_variant_part (t1)
232 && !get_variant_part (t2)))))
233 return t1;
234
235 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
236 that we will not have any alignment problems since, if we did, the
237 non-BLKmode type could not have been used. */
238 if (TYPE_MODE (t1) != BLKmode)
239 return t1;
240
241 /* If the rhs type is of constant size, use it whatever the modes. At
242 this point it is known to be smaller, or of constant size and the
243 lhs type is not. */
244 if (TREE_CONSTANT (TYPE_SIZE (t2)))
245 return t2;
246
247 /* Otherwise, if the rhs type is non-BLKmode, use it. */
248 if (TYPE_MODE (t2) != BLKmode)
249 return t2;
250
251 /* In this case, both types have variable size and BLKmode. It's
252 probably best to leave the "type mismatch" because changing it
253 could cause a bad self-referential reference. */
254 return NULL_TREE;
255 }
256 \f
257 /* Return an expression tree representing an equality comparison of A1 and A2,
258 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
259
260 Two arrays are equal in one of two ways: (1) if both have zero length in
261 some dimension (not necessarily the same dimension) or (2) if the lengths
262 in each dimension are equal and the data is equal. We perform the length
263 tests in as efficient a manner as possible. */
264
265 static tree
266 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
267 {
268 tree result = convert (result_type, boolean_true_node);
269 tree a1_is_null = convert (result_type, boolean_false_node);
270 tree a2_is_null = convert (result_type, boolean_false_node);
271 tree t1 = TREE_TYPE (a1);
272 tree t2 = TREE_TYPE (a2);
273 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
274 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
275 bool length_zero_p = false;
276
277 /* If the operands have side-effects, they need to be evaluated only once
278 in spite of the multiple references in the comparison. */
279 if (a1_side_effects_p)
280 a1 = gnat_protect_expr (a1);
281
282 if (a2_side_effects_p)
283 a2 = gnat_protect_expr (a2);
284
285 /* Process each dimension separately and compare the lengths. If any
286 dimension has a length known to be zero, set LENGTH_ZERO_P to true
287 in order to suppress the comparison of the data at the end. */
288 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
289 {
290 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
291 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
292 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
293 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
294 tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
295 size_one_node);
296 tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
297 size_one_node);
298 tree comparison, this_a1_is_null, this_a2_is_null;
299
300 /* If the length of the first array is a constant, swap our operands
301 unless the length of the second array is the constant zero. */
302 if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
303 {
304 tree tem;
305 bool btem;
306
307 tem = a1, a1 = a2, a2 = tem;
308 tem = t1, t1 = t2, t2 = tem;
309 tem = lb1, lb1 = lb2, lb2 = tem;
310 tem = ub1, ub1 = ub2, ub2 = tem;
311 tem = length1, length1 = length2, length2 = tem;
312 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
313 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
314 a2_side_effects_p = btem;
315 }
316
317 /* If the length of the second array is the constant zero, we can just
318 use the original stored bounds for the first array and see whether
319 last < first holds. */
320 if (integer_zerop (length2))
321 {
322 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
323
324 length_zero_p = true;
325
326 ub1
327 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
328 lb1
329 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
330
331 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
332 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
333 if (EXPR_P (comparison))
334 SET_EXPR_LOCATION (comparison, loc);
335
336 this_a1_is_null = comparison;
337 this_a2_is_null = convert (result_type, boolean_true_node);
338 }
339
340 /* Otherwise, if the length is some other constant value, we know that
341 this dimension in the second array cannot be superflat, so we can
342 just use its length computed from the actual stored bounds. */
343 else if (TREE_CODE (length2) == INTEGER_CST)
344 {
345 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
346
347 ub1
348 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
349 lb1
350 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
351 /* Note that we know that UB2 and LB2 are constant and hence
352 cannot contain a PLACEHOLDER_EXPR. */
353 ub2
354 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
355 lb2
356 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
357
358 comparison
359 = fold_build2_loc (loc, EQ_EXPR, result_type,
360 build_binary_op (MINUS_EXPR, b, ub1, lb1),
361 build_binary_op (MINUS_EXPR, b, ub2, lb2));
362 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
363 if (EXPR_P (comparison))
364 SET_EXPR_LOCATION (comparison, loc);
365
366 this_a1_is_null
367 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
368
369 this_a2_is_null = convert (result_type, boolean_false_node);
370 }
371
372 /* Otherwise, compare the computed lengths. */
373 else
374 {
375 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
376 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
377
378 comparison
379 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
380
381 /* If the length expression is of the form (cond ? val : 0), assume
382 that cond is equivalent to (length != 0). That's guaranteed by
383 construction of the array types in gnat_to_gnu_entity. */
384 if (TREE_CODE (length1) == COND_EXPR
385 && integer_zerop (TREE_OPERAND (length1, 2)))
386 this_a1_is_null
387 = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
388 else
389 this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
390 length1, size_zero_node);
391
392 /* Likewise for the second array. */
393 if (TREE_CODE (length2) == COND_EXPR
394 && integer_zerop (TREE_OPERAND (length2, 2)))
395 this_a2_is_null
396 = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
397 else
398 this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
399 length2, size_zero_node);
400 }
401
402 /* Append expressions for this dimension to the final expressions. */
403 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
404 result, comparison);
405
406 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
407 this_a1_is_null, a1_is_null);
408
409 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
410 this_a2_is_null, a2_is_null);
411
412 t1 = TREE_TYPE (t1);
413 t2 = TREE_TYPE (t2);
414 }
415
416 /* Unless the length of some dimension is known to be zero, compare the
417 data in the array. */
418 if (!length_zero_p)
419 {
420 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
421 tree comparison;
422
423 if (type)
424 {
425 a1 = convert (type, a1);
426 a2 = convert (type, a2);
427 }
428
429 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
430
431 result
432 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
433 }
434
435 /* The result is also true if both sizes are zero. */
436 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
437 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
438 a1_is_null, a2_is_null),
439 result);
440
441 /* If the operands have side-effects, they need to be evaluated before
442 doing the tests above since the place they otherwise would end up
443 being evaluated at run time could be wrong. */
444 if (a1_side_effects_p)
445 result = build2 (COMPOUND_EXPR, result_type, a1, result);
446
447 if (a2_side_effects_p)
448 result = build2 (COMPOUND_EXPR, result_type, a2, result);
449
450 return result;
451 }
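
/* Informal sketch of the result for two one-dimensional arrays A1 and A2
   with non-constant lengths L1 and L2 (illustrative, not generated code):

     (L1 == L2 && A1 == A2) || (L1 == 0 && L2 == 0)

   where the data comparison A1 == A2 is omitted altogether when some
   dimension is statically known to have zero length.  */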
452
453 /* Return an expression tree representing an equality comparison of P1 and P2,
454 two objects of fat pointer type. The result should be of type RESULT_TYPE.
455
456 Two fat pointers are equal in one of two ways: (1) if both have a null
457 pointer to the array or (2) if they contain the same pair of pointers.
458 We perform the comparison in as efficient a manner as possible. */
459
460 static tree
461 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
462 {
463 tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
464 tree p1_array_is_null, p2_array_is_null;
465
466 /* If either operand has side-effects, it has to be evaluated only once
467 in spite of the multiple references to it in the comparison. */
468 p1 = gnat_protect_expr (p1);
469 p2 = gnat_protect_expr (p2);
470
471 /* The constant folder doesn't fold fat pointer types so we do it here. */
472 if (TREE_CODE (p1) == CONSTRUCTOR)
473 p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
474 else
475 p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);
476
477 p1_array_is_null
478 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
479 fold_convert_loc (loc, TREE_TYPE (p1_array),
480 null_pointer_node));
481
482 if (TREE_CODE (p2) == CONSTRUCTOR)
483 p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
484 else
485 p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);
486
487 p2_array_is_null
488 = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
489 fold_convert_loc (loc, TREE_TYPE (p2_array),
490 null_pointer_node));
491
492 /* If one of the pointers to the array is null, just compare the other. */
493 if (integer_zerop (p1_array))
494 return p2_array_is_null;
495 else if (integer_zerop (p2_array))
496 return p1_array_is_null;
497
498 /* Otherwise, do the fully-fledged comparison. */
499 same_array
500 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
501
502 if (TREE_CODE (p1) == CONSTRUCTOR)
503 p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
504 else
505 p1_bounds
506 = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
507 true);
508
509 if (TREE_CODE (p2) == CONSTRUCTOR)
510 p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
511 else
512 p2_bounds
513 = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
514 true);
515
516 same_bounds
517 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
518
519 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
520 return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
521 build_binary_op (TRUTH_ORIF_EXPR, result_type,
522 p1_array_is_null, same_bounds));
523 }
524 \f
525 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
526 type TYPE. We know that TYPE is a modular type with a nonbinary
527 modulus. */
528
529 static tree
530 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
531 tree rhs)
532 {
533 tree modulus = TYPE_MODULUS (type);
534 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
535 unsigned int precision;
536 bool unsignedp = true;
537 tree op_type = type;
538 tree result;
539
540 /* If this is an addition of a constant, convert it to a subtraction
541 of a constant since we can do that faster. */
542 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
543 {
544 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
545 op_code = MINUS_EXPR;
546 }
547
548 /* For the logical operations, we only need PRECISION bits. For
549 addition and subtraction, we need one more and for multiplication we
550 need twice as many. But we never want to make a size smaller than
551 our size. */
552 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
553 needed_precision += 1;
554 else if (op_code == MULT_EXPR)
555 needed_precision *= 2;
556
557 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
558
559 /* Unsigned will do for everything but subtraction. */
560 if (op_code == MINUS_EXPR)
561 unsignedp = false;
562
563 /* If our type is the wrong signedness or isn't wide enough, make a new
564 type and convert both our operands to it. */
565 if (TYPE_PRECISION (op_type) < precision
566 || TYPE_UNSIGNED (op_type) != unsignedp)
567 {
568 /* Copy the type so we ensure it can be modified to make it modular. */
569 op_type = copy_type (gnat_type_for_size (precision, unsignedp));
570 modulus = convert (op_type, modulus);
571 SET_TYPE_MODULUS (op_type, modulus);
572 TYPE_MODULAR_P (op_type) = 1;
573 lhs = convert (op_type, lhs);
574 rhs = convert (op_type, rhs);
575 }
576
577 /* Do the operation, then we'll fix it up. */
578 result = fold_build2 (op_code, op_type, lhs, rhs);
579
580 /* For multiplication, we have no choice but to do a full modulus
581 operation. However, we want to do this in the narrowest
582 possible size. */
583 if (op_code == MULT_EXPR)
584 {
585 /* Copy the type so we ensure it can be modified to make it modular. */
586 tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
587 modulus = convert (div_type, modulus);
588 SET_TYPE_MODULUS (div_type, modulus);
589 TYPE_MODULAR_P (div_type) = 1;
590 result = convert (op_type,
591 fold_build2 (TRUNC_MOD_EXPR, div_type,
592 convert (div_type, result), modulus));
593 }
594
595 /* For subtraction, add the modulus back if we are negative. */
596 else if (op_code == MINUS_EXPR)
597 {
598 result = gnat_protect_expr (result);
599 result = fold_build3 (COND_EXPR, op_type,
600 fold_build2 (LT_EXPR, boolean_type_node, result,
601 build_int_cst (op_type, 0)),
602 fold_build2 (PLUS_EXPR, op_type, result, modulus),
603 result);
604 }
605
606 /* For the other operations, subtract the modulus if we are >= it. */
607 else
608 {
609 result = gnat_protect_expr (result);
610 result = fold_build3 (COND_EXPR, op_type,
611 fold_build2 (GE_EXPR, boolean_type_node,
612 result, modulus),
613 fold_build2 (MINUS_EXPR, op_type,
614 result, modulus),
615 result);
616 }
617
618 return convert (type, result);
619 }
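
/* Worked example (illustrative only): with a nonbinary modulus of 7, the
   addition X + 3 is first rewritten as the subtraction X - 4 (since
   7 - 3 = 4), and the result is fixed up by adding the modulus back when
   it is negative.  For X = 2 this gives 2 - 4 = -2, then -2 + 7 = 5,
   which matches (2 + 3) mod 7.  */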
620 \f
621 /* This page contains routines that implement the Ada semantics with regard
622 to atomic objects. They are fully piggybacked on the middle-end support
623 for atomic loads and stores.
624
625 *** Memory barriers and volatile objects ***
626
627 We implement the weakened form of the C.6(16) clause that was introduced
628 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
629 implementable without significant performance hits on modern platforms.
630
631 We also take advantage of the requirements imposed on shared variables by
632 9.10 (conditions for sequential actions) to have non-erroneous execution
633 and consider that C.6(16) and C.6(17) only prescribe a uniform order of
634 volatile updates with regard to sequential actions, i.e. with regard to
635 reads or updates of atomic objects.
636
637 As such, an update of an atomic object by a task requires that all earlier
638 accesses to volatile objects have completed. Similarly, later accesses to
639 volatile objects cannot be reordered before the update of the atomic object.
640 So, memory barriers both before and after the atomic update are needed.
641
642 For a read of an atomic object, a memory barrier is needed before the
643 atomic read so that a task does not see writes of volatile objects earlier
644 than the other tasks do. Finally, a barrier is needed after the atomic
645 read so that later reads or updates of volatile objects cannot be
646 reordered before it.
647
648 So, memory barriers are needed before and after atomic reads and updates.
649 And, in order to simplify the implementation, we use full memory barriers
650 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
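
/* At the C level, the sequentially consistent accesses built below are
   roughly equivalent to (illustrative sketch only):

     __atomic_load_n (&obj, __ATOMIC_SEQ_CST);
     __atomic_store_n (&obj, val, __ATOMIC_SEQ_CST);

   i.e. calls to the size-specific variants of BUILT_IN_ATOMIC_LOAD_N and
   BUILT_IN_ATOMIC_STORE_N selected with the help of resolve_atomic_size
   below.  */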
651
652 /* Return the size of TYPE, which must be a positive power of 2. */
653
654 static unsigned int
655 resolve_atomic_size (tree type)
656 {
657 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
658
659 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
660 return size;
661
662 /* We shouldn't reach here without having already detected that the size
663 isn't compatible with an atomic access. */
664 gcc_assert (Serious_Errors_Detected);
665
666 return 0;
667 }
668
669 /* Build an atomic load for the underlying atomic object in SRC. SYNC is
670 true if the load requires synchronization. */
671
672 tree
673 build_atomic_load (tree src, bool sync)
674 {
675 tree ptr_type
676 = build_pointer_type
677 (build_qualified_type (void_type_node,
678 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
679 tree mem_model
680 = build_int_cst (integer_type_node,
681 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
682 tree orig_src = src;
683 tree t, addr, val;
684 unsigned int size;
685 int fncode;
686
687 /* Remove conversions to get the address of the underlying object. */
688 src = remove_conversions (src, false);
689 size = resolve_atomic_size (TREE_TYPE (src));
690 if (size == 0)
691 return orig_src;
692
693 fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
694 t = builtin_decl_implicit ((enum built_in_function) fncode);
695
696 addr = build_unary_op (ADDR_EXPR, ptr_type, src);
697 val = build_call_expr (t, 2, addr, mem_model);
698
699 /* First reinterpret the loaded bits in the original type of the load,
700 then convert to the expected result type. */
701 t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
702 return convert (TREE_TYPE (orig_src), t);
703 }
704
705 /* Build an atomic store from SRC to the underlying atomic object in DEST.
706 SYNC is true if the store requires synchronization. */
707
708 tree
709 build_atomic_store (tree dest, tree src, bool sync)
710 {
711 tree ptr_type
712 = build_pointer_type
713 (build_qualified_type (void_type_node,
714 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
715 tree mem_model
716 = build_int_cst (integer_type_node,
717 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
718 tree orig_dest = dest;
719 tree t, int_type, addr;
720 unsigned int size;
721 int fncode;
722
723 /* Remove conversions to get the address of the underlying object. */
724 dest = remove_conversions (dest, false);
725 size = resolve_atomic_size (TREE_TYPE (dest));
726 if (size == 0)
727 return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
728
729 fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
730 t = builtin_decl_implicit ((enum built_in_function) fncode);
731 int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
732
733 /* First convert the bits to be stored to the original type of the store,
734 then reinterpret them in the effective type. But if the original type
735 is a padded type with the same size, convert to the inner type instead,
736 as we don't want to artificially introduce a CONSTRUCTOR here. */
737 if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
738 && TYPE_SIZE (TREE_TYPE (dest))
739 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
740 src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
741 else
742 src = convert (TREE_TYPE (dest), src);
743 src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
744 addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
745
746 return build_call_expr (t, 3, addr, src, mem_model);
747 }
748
749 /* Build a load-modify-store sequence from SRC to DEST. GNAT_NODE is used for
750 the location of the sequence. Note that, even though the load and the store
751 are both atomic, the sequence itself is not atomic. */
752
753 tree
754 build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
755 {
756 /* We will be modifying DEST below so we build a copy. */
757 dest = copy_node (dest);
758 tree ref = dest;
759
760 while (handled_component_p (ref))
761 {
762 /* The load should already have been generated during the translation
763 of the GNAT destination tree; find it in the GNU tree. */
764 if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
765 {
766 tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
767 if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
768 {
769 tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
770 tree t = CALL_EXPR_ARG (op, 0);
771 tree obj, temp, stmt;
772
773 /* Find the loaded object. */
774 if (TREE_CODE (t) == NOP_EXPR)
775 t = TREE_OPERAND (t, 0);
776 if (TREE_CODE (t) == ADDR_EXPR)
777 obj = TREE_OPERAND (t, 0);
778 else
779 obj = build1 (INDIRECT_REF, type, t);
780
781 /* Drop atomic and volatile qualifiers for the temporary. */
782 type = TYPE_MAIN_VARIANT (type);
783
784 /* And drop BLKmode, if need be, to put it into a register. */
785 if (TYPE_MODE (type) == BLKmode)
786 {
787 unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
788 type = copy_type (type);
789 SET_TYPE_MODE (type, mode_for_size (size, MODE_INT, 0));
790 }
791
792 /* Create the temporary by inserting a SAVE_EXPR. */
793 temp = build1 (SAVE_EXPR, type,
794 build1 (VIEW_CONVERT_EXPR, type, op));
795 TREE_OPERAND (ref, 0) = temp;
796
797 start_stmt_group ();
798
799 /* Build the modify of the temporary. */
800 stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
801 add_stmt_with_node (stmt, gnat_node);
802
803 /* Build the store to the object. */
804 stmt = build_atomic_store (obj, temp, false);
805 add_stmt_with_node (stmt, gnat_node);
806
807 return end_stmt_group ();
808 }
809 }
810
811 TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
812 ref = TREE_OPERAND (ref, 0);
813 }
814
815 /* Something went wrong earlier if we have not found the atomic load. */
816 gcc_unreachable ();
817 }
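
/* Informal sketch of the sequence built above (assumed Ada source
   "Obj.Comp := Val" with Obj atomic): the atomic load of Obj generated
   earlier is wrapped in a SAVE_EXPR temporary, the component of the
   temporary is modified, and the whole temporary is stored back
   atomically:

     temp = SAVE_EXPR <VIEW_CONVERT_EXPR <atomic load of Obj>>;
     temp.Comp = Val;
     build_atomic_store (Obj, temp, false);

   As stated above, the sequence as a whole is not atomic.  */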
818 \f
819 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
820 desired for the result. Usually the operation is to be performed
821 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
822 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
823 case the type to be used will be derived from the operands.
824
825 This function is very much unlike the ones for C and C++ since we
826 have already done any type conversion and matching required. All we
827 have to do here is validate the work done by SEM and handle subtypes. */
828
829 tree
830 build_binary_op (enum tree_code op_code, tree result_type,
831 tree left_operand, tree right_operand)
832 {
833 tree left_type = TREE_TYPE (left_operand);
834 tree right_type = TREE_TYPE (right_operand);
835 tree left_base_type = get_base_type (left_type);
836 tree right_base_type = get_base_type (right_type);
837 tree operation_type = result_type;
838 tree best_type = NULL_TREE;
839 tree modulus, result;
840 bool has_side_effects = false;
841
842 if (operation_type
843 && TREE_CODE (operation_type) == RECORD_TYPE
844 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
845 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
846
847 if (operation_type
848 && TREE_CODE (operation_type) == INTEGER_TYPE
849 && TYPE_EXTRA_SUBTYPE_P (operation_type))
850 operation_type = get_base_type (operation_type);
851
852 modulus = (operation_type
853 && TREE_CODE (operation_type) == INTEGER_TYPE
854 && TYPE_MODULAR_P (operation_type)
855 ? TYPE_MODULUS (operation_type) : NULL_TREE);
856
857 switch (op_code)
858 {
859 case INIT_EXPR:
860 case MODIFY_EXPR:
861 gcc_checking_assert (!result_type);
862
863 /* If there were integral or pointer conversions on the LHS, remove
864 them; we'll be putting them back below if needed. Likewise for
865 conversions between array and record types, except for justified
866 modular types. But don't do this if the right operand is not
867 BLKmode (for packed arrays) unless we are not changing the mode. */
868 while ((CONVERT_EXPR_P (left_operand)
869 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
870 && (((INTEGRAL_TYPE_P (left_type)
871 || POINTER_TYPE_P (left_type))
872 && (INTEGRAL_TYPE_P (TREE_TYPE
873 (TREE_OPERAND (left_operand, 0)))
874 || POINTER_TYPE_P (TREE_TYPE
875 (TREE_OPERAND (left_operand, 0)))))
876 || (((TREE_CODE (left_type) == RECORD_TYPE
877 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
878 || TREE_CODE (left_type) == ARRAY_TYPE)
879 && ((TREE_CODE (TREE_TYPE
880 (TREE_OPERAND (left_operand, 0)))
881 == RECORD_TYPE)
882 || (TREE_CODE (TREE_TYPE
883 (TREE_OPERAND (left_operand, 0)))
884 == ARRAY_TYPE))
885 && (TYPE_MODE (right_type) == BLKmode
886 || (TYPE_MODE (left_type)
887 == TYPE_MODE (TREE_TYPE
888 (TREE_OPERAND
889 (left_operand, 0))))))))
890 {
891 left_operand = TREE_OPERAND (left_operand, 0);
892 left_type = TREE_TYPE (left_operand);
893 }
894
895 /* If a class-wide type may be involved, force use of the RHS type. */
896 if ((TREE_CODE (right_type) == RECORD_TYPE
897 || TREE_CODE (right_type) == UNION_TYPE)
898 && TYPE_ALIGN_OK (right_type))
899 operation_type = right_type;
900
901 /* If we are copying between padded objects with compatible types, use
902 the padded view of the objects; this is very likely more efficient.
903 Likewise for a padded object that is assigned a constructor, if we
904 can convert the constructor to the inner type, to avoid putting a
905 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
906 actually copied anything. */
907 else if (TYPE_IS_PADDING_P (left_type)
908 && TREE_CONSTANT (TYPE_SIZE (left_type))
909 && ((TREE_CODE (right_operand) == COMPONENT_REF
910 && TYPE_MAIN_VARIANT (left_type)
911 == TYPE_MAIN_VARIANT
912 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
913 || (TREE_CODE (right_operand) == CONSTRUCTOR
914 && !CONTAINS_PLACEHOLDER_P
915 (DECL_SIZE (TYPE_FIELDS (left_type)))))
916 && !integer_zerop (TYPE_SIZE (right_type)))
917 {
918 /* We make an exception for a BLKmode type padding a non-BLKmode
919 inner type and do the conversion of the LHS right away, since
920 unchecked_convert wouldn't do it properly. */
921 if (TYPE_MODE (left_type) == BLKmode
922 && TYPE_MODE (right_type) != BLKmode
923 && TREE_CODE (right_operand) != CONSTRUCTOR)
924 {
925 operation_type = right_type;
926 left_operand = convert (operation_type, left_operand);
927 left_type = operation_type;
928 }
929 else
930 operation_type = left_type;
931 }
932
933 /* If we have a call to a function that returns with variable size, use
934 the RHS type in case we want to use the return slot optimization. */
935 else if (TREE_CODE (right_operand) == CALL_EXPR
936 && return_type_with_variable_size_p (right_type))
937 operation_type = right_type;
938
939 /* Find the best type to use for copying between aggregate types. */
940 else if (((TREE_CODE (left_type) == ARRAY_TYPE
941 && TREE_CODE (right_type) == ARRAY_TYPE)
942 || (TREE_CODE (left_type) == RECORD_TYPE
943 && TREE_CODE (right_type) == RECORD_TYPE))
944 && (best_type = find_common_type (left_type, right_type)))
945 operation_type = best_type;
946
947 /* Otherwise use the LHS type. */
948 else
949 operation_type = left_type;
950
951 /* Ensure everything on the LHS is valid. If we have a field reference,
952 strip anything that get_inner_reference can handle. Then remove any
953 conversions between types having the same code and mode. And mark
954 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
955 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node. */
956 result = left_operand;
957 while (true)
958 {
959 tree restype = TREE_TYPE (result);
960
961 if (TREE_CODE (result) == COMPONENT_REF
962 || TREE_CODE (result) == ARRAY_REF
963 || TREE_CODE (result) == ARRAY_RANGE_REF)
964 while (handled_component_p (result))
965 result = TREE_OPERAND (result, 0);
966 else if (TREE_CODE (result) == REALPART_EXPR
967 || TREE_CODE (result) == IMAGPART_EXPR
968 || (CONVERT_EXPR_P (result)
969 && (((TREE_CODE (restype)
970 == TREE_CODE (TREE_TYPE
971 (TREE_OPERAND (result, 0))))
972 && (TYPE_MODE (TREE_TYPE
973 (TREE_OPERAND (result, 0)))
974 == TYPE_MODE (restype)))
975 || TYPE_ALIGN_OK (restype))))
976 result = TREE_OPERAND (result, 0);
977 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
978 {
979 TREE_ADDRESSABLE (result) = 1;
980 result = TREE_OPERAND (result, 0);
981 }
982 else
983 break;
984 }
985
986 gcc_assert (TREE_CODE (result) == INDIRECT_REF
987 || TREE_CODE (result) == NULL_EXPR
988 || TREE_CODE (result) == SAVE_EXPR
989 || DECL_P (result));
990
991 /* Convert the right operand to the operation type unless it is
992 either already of the correct type or if the type involves a
993 placeholder, since the RHS may not have the same record type. */
994 if (operation_type != right_type
995 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
996 {
997 right_operand = convert (operation_type, right_operand);
998 right_type = operation_type;
999 }
1000
1001 /* If the left operand is not of the same type as the operation
1002 type, wrap it up in a VIEW_CONVERT_EXPR. */
1003 if (left_type != operation_type)
1004 left_operand = unchecked_convert (operation_type, left_operand, false);
1005
1006 has_side_effects = true;
1007 modulus = NULL_TREE;
1008 break;
1009
1010 case ARRAY_REF:
1011 if (!operation_type)
1012 operation_type = TREE_TYPE (left_type);
1013
1014 /* ... fall through ... */
1015
1016 case ARRAY_RANGE_REF:
1017 /* First look through conversion between type variants. Note that
1018 this changes neither the operation type nor the type domain. */
1019 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
1020 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
1021 == TYPE_MAIN_VARIANT (left_type))
1022 {
1023 left_operand = TREE_OPERAND (left_operand, 0);
1024 left_type = TREE_TYPE (left_operand);
1025 }
1026
1027 /* For a range, make sure the element type is consistent. */
1028 if (op_code == ARRAY_RANGE_REF
1029 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
1030 operation_type = build_array_type (TREE_TYPE (left_type),
1031 TYPE_DOMAIN (operation_type));
1032
1033 /* Then convert the right operand to its base type. This will prevent
1034 unneeded sign conversions when sizetype is wider than integer. */
1035 right_operand = convert (right_base_type, right_operand);
1036 right_operand = convert_to_index_type (right_operand);
1037 modulus = NULL_TREE;
1038 break;
1039
1040 case TRUTH_ANDIF_EXPR:
1041 case TRUTH_ORIF_EXPR:
1042 case TRUTH_AND_EXPR:
1043 case TRUTH_OR_EXPR:
1044 case TRUTH_XOR_EXPR:
1045 gcc_checking_assert
1046 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1047 operation_type = left_base_type;
1048 left_operand = convert (operation_type, left_operand);
1049 right_operand = convert (operation_type, right_operand);
1050 break;
1051
1052 case GE_EXPR:
1053 case LE_EXPR:
1054 case GT_EXPR:
1055 case LT_EXPR:
1056 case EQ_EXPR:
1057 case NE_EXPR:
1058 gcc_checking_assert
1059 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1060 /* If either operand is a NULL_EXPR, just return a new one. */
1061 if (TREE_CODE (left_operand) == NULL_EXPR)
1062 return build2 (op_code, result_type,
1063 build1 (NULL_EXPR, integer_type_node,
1064 TREE_OPERAND (left_operand, 0)),
1065 integer_zero_node);
1066
1067 else if (TREE_CODE (right_operand) == NULL_EXPR)
1068 return build2 (op_code, result_type,
1069 build1 (NULL_EXPR, integer_type_node,
1070 TREE_OPERAND (right_operand, 0)),
1071 integer_zero_node);
1072
1073 /* If either object is of a justified modular type, get the
1074 field from within. */
1075 if (TREE_CODE (left_type) == RECORD_TYPE
1076 && TYPE_JUSTIFIED_MODULAR_P (left_type))
1077 {
1078 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
1079 left_operand);
1080 left_type = TREE_TYPE (left_operand);
1081 left_base_type = get_base_type (left_type);
1082 }
1083
1084 if (TREE_CODE (right_type) == RECORD_TYPE
1085 && TYPE_JUSTIFIED_MODULAR_P (right_type))
1086 {
1087 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
1088 right_operand);
1089 right_type = TREE_TYPE (right_operand);
1090 right_base_type = get_base_type (right_type);
1091 }
1092
1093 /* If both objects are arrays, compare them specially. */
1094 if ((TREE_CODE (left_type) == ARRAY_TYPE
1095 || (TREE_CODE (left_type) == INTEGER_TYPE
1096 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
1097 && (TREE_CODE (right_type) == ARRAY_TYPE
1098 || (TREE_CODE (right_type) == INTEGER_TYPE
1099 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1100 {
1101 result = compare_arrays (input_location,
1102 result_type, left_operand, right_operand);
1103 if (op_code == NE_EXPR)
1104 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1105 else
1106 gcc_assert (op_code == EQ_EXPR);
1107
1108 return result;
1109 }
1110
1111 /* Otherwise, the base types must be the same, unless they are both fat
1112 pointer types or record types. In the latter case, use the best type
1113 and convert both operands to that type. */
1114 if (left_base_type != right_base_type)
1115 {
1116 if (TYPE_IS_FAT_POINTER_P (left_base_type)
1117 && TYPE_IS_FAT_POINTER_P (right_base_type))
1118 {
1119 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1120 == TYPE_MAIN_VARIANT (right_base_type));
1121 best_type = left_base_type;
1122 }
1123
1124 else if (TREE_CODE (left_base_type) == RECORD_TYPE
1125 && TREE_CODE (right_base_type) == RECORD_TYPE)
1126 {
1127 /* The only way this is permitted is if both types have the same
1128 name. In that case, one of them must not be self-referential.
1129 Use it as the best type, preferring one with a fixed size. */
1130 gcc_assert (TYPE_NAME (left_base_type)
1131 && TYPE_NAME (left_base_type)
1132 == TYPE_NAME (right_base_type));
1133
1134 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1135 best_type = left_base_type;
1136 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1137 best_type = right_base_type;
1138 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1139 best_type = left_base_type;
1140 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1141 best_type = right_base_type;
1142 else
1143 gcc_unreachable ();
1144 }
1145
1146 else if (POINTER_TYPE_P (left_base_type)
1147 && POINTER_TYPE_P (right_base_type))
1148 {
1149 gcc_assert (TREE_TYPE (left_base_type)
1150 == TREE_TYPE (right_base_type));
1151 best_type = left_base_type;
1152 }
1153 else
1154 gcc_unreachable ();
1155
1156 left_operand = convert (best_type, left_operand);
1157 right_operand = convert (best_type, right_operand);
1158 }
1159 else
1160 {
1161 left_operand = convert (left_base_type, left_operand);
1162 right_operand = convert (right_base_type, right_operand);
1163 }
1164
1165 /* If both objects are fat pointers, compare them specially. */
1166 if (TYPE_IS_FAT_POINTER_P (left_base_type))
1167 {
1168 result
1169 = compare_fat_pointers (input_location,
1170 result_type, left_operand, right_operand);
1171 if (op_code == NE_EXPR)
1172 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1173 else
1174 gcc_assert (op_code == EQ_EXPR);
1175
1176 return result;
1177 }
1178
1179 modulus = NULL_TREE;
1180 break;
1181
1182 case LSHIFT_EXPR:
1183 case RSHIFT_EXPR:
1184 case LROTATE_EXPR:
1185 case RROTATE_EXPR:
1186 /* The RHS of a shift can be any type. Also, ignore any modulus
1187 (we used to abort, but this is needed for unchecked conversion
1188 to modular types). Otherwise, processing is the same as normal. */
1189 gcc_assert (operation_type == left_base_type);
1190 modulus = NULL_TREE;
1191 left_operand = convert (operation_type, left_operand);
1192 break;
1193
1194 case BIT_AND_EXPR:
1195 case BIT_IOR_EXPR:
1196 case BIT_XOR_EXPR:
1197 /* For binary modulus, if the inputs are in range, so are the
1198 outputs. */
1199 if (modulus && integer_pow2p (modulus))
1200 modulus = NULL_TREE;
1201 goto common;
1202
1203 case COMPLEX_EXPR:
1204 gcc_assert (TREE_TYPE (result_type) == left_base_type
1205 && TREE_TYPE (result_type) == right_base_type);
1206 left_operand = convert (left_base_type, left_operand);
1207 right_operand = convert (right_base_type, right_operand);
1208 break;
1209
1210 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1211 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1212 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1213 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1214 /* These always produce results lower than either operand. */
1215 modulus = NULL_TREE;
1216 goto common;
1217
1218 case POINTER_PLUS_EXPR:
1219 gcc_assert (operation_type == left_base_type
1220 && sizetype == right_base_type);
1221 left_operand = convert (operation_type, left_operand);
1222 right_operand = convert (sizetype, right_operand);
1223 break;
1224
1225 case PLUS_NOMOD_EXPR:
1226 case MINUS_NOMOD_EXPR:
1227 if (op_code == PLUS_NOMOD_EXPR)
1228 op_code = PLUS_EXPR;
1229 else
1230 op_code = MINUS_EXPR;
1231 modulus = NULL_TREE;
1232
1233 /* ... fall through ... */
1234
1235 case PLUS_EXPR:
1236 case MINUS_EXPR:
1237 /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1238 other compilers. Unlike C, Ada doesn't allow arithmetic in these
1239 types, but addition/subtraction can be generated for Succ/Pred. */
1240 if (operation_type
1241 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1242 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1243 operation_type = left_base_type = right_base_type
1244 = gnat_type_for_mode (TYPE_MODE (operation_type),
1245 TYPE_UNSIGNED (operation_type));
1246
1247 /* ... fall through ... */
1248
1249 default:
1250 common:
1251 /* The result type should be the same as the base types of
1252 both operands (and they should be the same). Convert
1253 everything to the result type. */
1254
1255 gcc_assert (operation_type == left_base_type
1256 && left_base_type == right_base_type);
1257 left_operand = convert (operation_type, left_operand);
1258 right_operand = convert (operation_type, right_operand);
1259 }
1260
1261 if (modulus && !integer_pow2p (modulus))
1262 {
1263 result = nonbinary_modular_operation (op_code, operation_type,
1264 left_operand, right_operand);
1265 modulus = NULL_TREE;
1266 }
1267 /* If either operand is a NULL_EXPR, just return a new one. */
1268 else if (TREE_CODE (left_operand) == NULL_EXPR)
1269 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1270 else if (TREE_CODE (right_operand) == NULL_EXPR)
1271 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1272 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1273 result = fold (build4 (op_code, operation_type, left_operand,
1274 right_operand, NULL_TREE, NULL_TREE));
1275 else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1276 result = build2 (op_code, void_type_node, left_operand, right_operand);
1277 else
1278 result
1279 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1280
1281 if (TREE_CONSTANT (result))
1282 ;
1283 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1284 {
1285 if (TYPE_VOLATILE (operation_type))
1286 TREE_THIS_VOLATILE (result) = 1;
1287 }
1288 else
1289 TREE_CONSTANT (result)
1290 |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1291
1292 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1293
1294 /* If we are working with modular types, perform the MOD operation
1295 if something above hasn't eliminated the need for it. */
1296 if (modulus)
1297 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1298 convert (operation_type, modulus));
1299
1300 if (result_type && result_type != operation_type)
1301 result = convert (result_type, result);
1302
1303 return result;
1304 }
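
/* Typical calls (illustrative, with assumed gnu_* operands): an assignment
   is built as build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_lhs, gnu_rhs),
   whereas an arithmetic operation passes the desired result type, e.g.
   build_binary_op (PLUS_EXPR, gnu_type, gnu_lhs, gnu_rhs), possibly
   triggering the FLOOR_MOD_EXPR fixup above for modular types.  */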
1305 \f
1306 /* Similar, but for unary operations. */
1307
1308 tree
1309 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1310 {
1311 tree type = TREE_TYPE (operand);
1312 tree base_type = get_base_type (type);
1313 tree operation_type = result_type;
1314 tree result;
1315
1316 if (operation_type
1317 && TREE_CODE (operation_type) == RECORD_TYPE
1318 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1319 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1320
1321 if (operation_type
1322 && TREE_CODE (operation_type) == INTEGER_TYPE
1323 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1324 operation_type = get_base_type (operation_type);
1325
1326 switch (op_code)
1327 {
1328 case REALPART_EXPR:
1329 case IMAGPART_EXPR:
1330 if (!operation_type)
1331 result_type = operation_type = TREE_TYPE (type);
1332 else
1333 gcc_assert (result_type == TREE_TYPE (type));
1334
1335 result = fold_build1 (op_code, operation_type, operand);
1336 break;
1337
1338 case TRUTH_NOT_EXPR:
1339 gcc_checking_assert
1340 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1341 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1342 /* When not optimizing, fold the result as invert_truthvalue_loc
1343 doesn't fold the result of comparisons. This is intended to undo
1344 the trick used for boolean rvalues in gnat_to_gnu. */
1345 if (!optimize)
1346 result = fold (result);
1347 break;
1348
1349 case ATTR_ADDR_EXPR:
1350 case ADDR_EXPR:
1351 switch (TREE_CODE (operand))
1352 {
1353 case INDIRECT_REF:
1354 case UNCONSTRAINED_ARRAY_REF:
1355 result = TREE_OPERAND (operand, 0);
1356
1357 /* Make sure the type here is a pointer, not a reference.
1358 GCC wants pointer types for function addresses. */
1359 if (!result_type)
1360 result_type = build_pointer_type (type);
1361
1362 /* If the underlying object can alias everything, propagate the
1363 property since we are effectively retrieving the object. */
1364 if (POINTER_TYPE_P (TREE_TYPE (result))
1365 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1366 {
1367 if (TREE_CODE (result_type) == POINTER_TYPE
1368 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1369 result_type
1370 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1371 TYPE_MODE (result_type),
1372 true);
1373 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1374 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1375 result_type
1376 = build_reference_type_for_mode (TREE_TYPE (result_type),
1377 TYPE_MODE (result_type),
1378 true);
1379 }
1380 break;
1381
1382 case NULL_EXPR:
1383 result = operand;
1384 TREE_TYPE (result) = type = build_pointer_type (type);
1385 break;
1386
1387 case COMPOUND_EXPR:
1388 /* Fold a compound expression if it has unconstrained array type
1389 since the middle-end cannot handle it. But we don't do it in the
1390 general case because it may introduce aliasing issues if the
1391 first operand is an indirect assignment and the second operand is
1392 the corresponding address, e.g. for an allocator. However, do
1393 it for a return value to expose it for later recognition. */
1394 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
1395 || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
1396 && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
1397 {
1398 result = build_unary_op (ADDR_EXPR, result_type,
1399 TREE_OPERAND (operand, 1));
1400 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1401 TREE_OPERAND (operand, 0), result);
1402 break;
1403 }
1404 goto common;
1405
1406 case ARRAY_REF:
1407 case ARRAY_RANGE_REF:
1408 case COMPONENT_REF:
1409 case BIT_FIELD_REF:
1410 /* If this is for 'Address, find the address of the prefix and add
1411 the offset to the field. Otherwise, do this the normal way. */
1412 if (op_code == ATTR_ADDR_EXPR)
1413 {
1414 HOST_WIDE_INT bitsize;
1415 HOST_WIDE_INT bitpos;
1416 tree offset, inner;
1417 machine_mode mode;
1418 int unsignedp, reversep, volatilep;
1419
1420 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1421 &mode, &unsignedp, &reversep,
1422 &volatilep, false);
1423
1424 /* If INNER is a padding type whose field has a self-referential
1425 size, convert to that inner type. We know the offset is zero
1426 and we need to have that type visible. */
1427 if (type_is_padding_self_referential (TREE_TYPE (inner)))
1428 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1429 inner);
1430
1431 /* Compute the offset as a byte offset from INNER. */
1432 if (!offset)
1433 offset = size_zero_node;
1434
1435 offset = size_binop (PLUS_EXPR, offset,
1436 size_int (bitpos / BITS_PER_UNIT));
1437
1438 /* Take the address of INNER, convert it to a pointer to our type
1439 and add the offset. */
1440 inner = build_unary_op (ADDR_EXPR,
1441 build_pointer_type (TREE_TYPE (operand)),
1442 inner);
1443 result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
1444 inner, offset);
1445 break;
1446 }
1447 goto common;
1448
1449 case CONSTRUCTOR:
1450 /* If this is just a constructor for a padded record, we can
1451 just take the address of the single field and convert it to
1452 a pointer to our type. */
1453 if (TYPE_IS_PADDING_P (type))
1454 {
1455 result
1456 = build_unary_op (ADDR_EXPR,
1457 build_pointer_type (TREE_TYPE (operand)),
1458 CONSTRUCTOR_ELT (operand, 0)->value);
1459 break;
1460 }
1461 goto common;
1462
1463 case NOP_EXPR:
1464 if (AGGREGATE_TYPE_P (type)
1465 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1466 return build_unary_op (ADDR_EXPR, result_type,
1467 TREE_OPERAND (operand, 0));
1468
1469 /* ... fallthru ... */
1470
1471 case VIEW_CONVERT_EXPR:
1472 /* If this is just a variant conversion or if the conversion doesn't
1473 change the mode, get the result type from this type and go down.
1474 This is needed for conversions of CONST_DECLs, to eventually get
1475 to the address of their CORRESPONDING_VARs. */
1476 if ((TYPE_MAIN_VARIANT (type)
1477 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1478 || (TYPE_MODE (type) != BLKmode
1479 && (TYPE_MODE (type)
1480 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1481 return build_unary_op (ADDR_EXPR,
1482 (result_type ? result_type
1483 : build_pointer_type (type)),
1484 TREE_OPERAND (operand, 0));
1485 goto common;
1486
1487 case CONST_DECL:
1488 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1489
1490 /* ... fall through ... */
1491
1492 default:
1493 common:
1494
1495 /* If we are taking the address of a padded record whose field
1496 contains a template, take the address of the field. */
1497 if (TYPE_IS_PADDING_P (type)
1498 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1499 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1500 {
1501 type = TREE_TYPE (TYPE_FIELDS (type));
1502 operand = convert (type, operand);
1503 }
1504
1505 gnat_mark_addressable (operand);
1506 result = build_fold_addr_expr (operand);
1507 }
1508
1509 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1510 break;
1511
1512 case INDIRECT_REF:
1513 {
1514 tree t = remove_conversions (operand, false);
1515 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1516
1517 /* If TYPE is a thin pointer, either first retrieve the base if this
1518 is an expression with an offset built for the initialization of an
1519 object with an unconstrained nominal subtype, or else convert to
1520 the fat pointer. */
1521 if (TYPE_IS_THIN_POINTER_P (type))
1522 {
1523 tree rec_type = TREE_TYPE (type);
1524
1525 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1526 && TREE_OPERAND (operand, 1)
1527 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1528 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1529 {
1530 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1531 type = TREE_TYPE (operand);
1532 }
1533 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1534 {
1535 operand
1536 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1537 operand);
1538 type = TREE_TYPE (operand);
1539 }
1540 }
1541
1542 /* If we want to refer to an unconstrained array, use the appropriate
1543 expression. But this will never survive down to the back-end. */
1544 if (TYPE_IS_FAT_POINTER_P (type))
1545 {
1546 result = build1 (UNCONSTRAINED_ARRAY_REF,
1547 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1548 TREE_READONLY (result)
1549 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1550 }
1551
1552 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1553 else if (TREE_CODE (operand) == ADDR_EXPR)
1554 result = TREE_OPERAND (operand, 0);
1555
1556 /* Otherwise, build and fold the indirect reference. */
1557 else
1558 {
1559 result = build_fold_indirect_ref (operand);
1560 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1561 }
1562
1563 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1564 {
1565 TREE_SIDE_EFFECTS (result) = 1;
1566 if (TREE_CODE (result) == INDIRECT_REF)
1567 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1568 }
1569
1570 if ((TREE_CODE (result) == INDIRECT_REF
1571 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1572 && can_never_be_null)
1573 TREE_THIS_NOTRAP (result) = 1;
1574
1575 break;
1576 }
1577
1578 case NEGATE_EXPR:
1579 case BIT_NOT_EXPR:
1580 {
1581 tree modulus = ((operation_type
1582 && TREE_CODE (operation_type) == INTEGER_TYPE
1583 && TYPE_MODULAR_P (operation_type))
1584 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1585 int mod_pow2 = modulus && integer_pow2p (modulus);
1586
1587 /* If this is a modular type, there are various possibilities
1588 depending on the operation and whether the modulus is a
1589 power of two or not. */
1590
1591 if (modulus)
1592 {
1593 gcc_assert (operation_type == base_type);
1594 operand = convert (operation_type, operand);
1595
1596 /* In the negate case for a binary modulus, the fastest
1597 approach is the straightforward code; the TRUNC_MOD_EXPR
1598 below amounts to an AND operation. */
1599 if (op_code == NEGATE_EXPR && mod_pow2)
1600 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1601 fold_build1 (NEGATE_EXPR, operation_type,
1602 operand),
1603 modulus);
1604
1605 /* In the nonbinary negate case, return zero for a zero operand,
1606 else return the modulus minus the operand. If the modulus
1607 is a power of two minus one, we can do the subtraction
1608 as an XOR since it is equivalent and faster on most machines. */
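/* Illustrative example, added for clarity and not part of the original
comment: with a modulus of 7 and an operand of 2, the negation is
7 - 2 = 5; since 7 + 1 = 8 is a power of two, 2 xor 7 = 5 gives the
same result, so the XOR form applies exactly when the modulus is a
power of two minus one. */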
1609 else if (op_code == NEGATE_EXPR && !mod_pow2)
1610 {
1611 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1612 modulus,
1613 build_int_cst (operation_type,
1614 1))))
1615 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1616 operand, modulus);
1617 else
1618 result = fold_build2 (MINUS_EXPR, operation_type,
1619 modulus, operand);
1620
1621 result = fold_build3 (COND_EXPR, operation_type,
1622 fold_build2 (NE_EXPR,
1623 boolean_type_node,
1624 operand,
1625 build_int_cst
1626 (operation_type, 0)),
1627 result, operand);
1628 }
1629 else
1630 {
1631 /* For the NOT cases, we need a constant equal to
1632 the modulus minus one. For a binary modulus, we XOR
1633 the operand against that constant; for a nonbinary
1634 modulus, we subtract the operand from that constant. */
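/* Illustrative example, added for clarity and not part of the original
comment: for a binary modulus of 8, CNST is 7 and "not 5" is
5 xor 7 = 2; for a nonbinary modulus of 7, CNST is 6 and "not 5"
is 6 - 5 = 1. */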
1635
1636 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1637 build_int_cst (operation_type, 1));
1638
1639 if (mod_pow2)
1640 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1641 operand, cnst);
1642 else
1643 result = fold_build2 (MINUS_EXPR, operation_type,
1644 cnst, operand);
1645 }
1646
1647 break;
1648 }
1649 }
1650
1651 /* ... fall through ... */
1652
1653 default:
1654 gcc_assert (operation_type == base_type);
1655 result = fold_build1 (op_code, operation_type,
1656 convert (operation_type, operand));
1657 }
1658
1659 if (result_type && TREE_TYPE (result) != result_type)
1660 result = convert (result_type, result);
1661
1662 return result;
1663 }
1664 \f
1665 /* Similar, but for COND_EXPR. */
1666
1667 tree
1668 build_cond_expr (tree result_type, tree condition_operand,
1669 tree true_operand, tree false_operand)
1670 {
1671 bool addr_p = false;
1672 tree result;
1673
1674 /* The front-end verified that the result, true and false operands have
1675 the same base type. Convert everything to the result type. */
1676 true_operand = convert (result_type, true_operand);
1677 false_operand = convert (result_type, false_operand);
1678
1679 /* If the result type is unconstrained, take the address of the operands and
1680 then dereference the result. Likewise if the result type is passed by
1681 reference, because creating a temporary of this type is not allowed. */
1682 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1683 || TYPE_IS_BY_REFERENCE_P (result_type)
1684 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1685 {
1686 result_type = build_pointer_type (result_type);
1687 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1688 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1689 addr_p = true;
1690 }
1691
1692 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1693 true_operand, false_operand);
1694
1695 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1696 in both arms, make sure it gets evaluated by moving it ahead of the
1697 conditional expression. This is necessary because it is evaluated
1698 in only one place at run time and would otherwise be uninitialized
1699 in one of the arms. */
1700 true_operand = skip_simple_arithmetic (true_operand);
1701 false_operand = skip_simple_arithmetic (false_operand);
1702
1703 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1704 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1705
1706 if (addr_p)
1707 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1708
1709 return result;
1710 }
1711
1712 /* Similar, but for COMPOUND_EXPR. */
1713
1714 tree
1715 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1716 {
1717 bool addr_p = false;
1718 tree result;
1719
1720 /* If the result type is unconstrained, take the address of the operand and
1721 then dereference the result. Likewise if the result type is passed by
1722 reference, but this is natively handled in the gimplifier. */
1723 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1724 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1725 {
1726 result_type = build_pointer_type (result_type);
1727 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1728 addr_p = true;
1729 }
1730
1731 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1732 expr_operand);
1733
1734 if (addr_p)
1735 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1736
1737 return result;
1738 }
1739 \f
1740 /* Conveniently construct a function call expression. FNDECL names the
1741 function to be called, N is the number of arguments, and the "..."
1742 parameters are the argument expressions. Unlike build_call_expr
1743 this doesn't fold the call, hence it will always return a CALL_EXPR. */
1744
1745 tree
1746 build_call_n_expr (tree fndecl, int n, ...)
1747 {
1748 va_list ap;
1749 tree fntype = TREE_TYPE (fndecl);
1750 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1751
1752 va_start (ap, n);
1753 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1754 va_end (ap);
1755 return fn;
1756 }
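/* Usage sketch, added for clarity and not part of the original sources:
build_call_n_expr (fndecl, 2, arg1, arg2) yields the unfolded tree
fndecl (arg1, arg2), with FNDECL wrapped in an ADDR_EXPR as the callee;
this is how the raise and allocation helpers below emit their calls
into the runtime. */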
1757 \f
1758 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1759 MSG gives the exception's identity for the call to Local_Raise, if any. */
1760
1761 static tree
1762 build_goto_raise (tree label, int msg)
1763 {
1764 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1765 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1766
1767 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1768 if (Present (local_raise))
1769 {
1770 tree gnu_local_raise
1771 = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
1772 tree gnu_exception_entity
1773 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
1774 tree gnu_call
1775 = build_call_n_expr (gnu_local_raise, 1,
1776 build_unary_op (ADDR_EXPR, NULL_TREE,
1777 gnu_exception_entity));
1778 gnu_result
1779 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1780 }
1781
1782 return gnu_result;
1783 }
1784
1785 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1786 pointed to by FILENAME, LINE and COL. Fall back to the current location
1787 if GNAT_NODE is absent or has no SLOC. */
1788
1789 static void
1790 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1791 {
1792 const char *str;
1793 int line_number, column_number;
1794
1795 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1796 {
1797 str = "";
1798 line_number = 0;
1799 column_number = 0;
1800 }
1801 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1802 {
1803 str = Get_Name_String
1804 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1805 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1806 column_number = Get_Column_Number (Sloc (gnat_node));
1807 }
1808 else
1809 {
1810 str = lbasename (LOCATION_FILE (input_location));
1811 line_number = LOCATION_LINE (input_location);
1812 column_number = LOCATION_COLUMN (input_location);
1813 }
1814
1815 const int len = strlen (str);
1816 *filename = build_string (len, str);
1817 TREE_TYPE (*filename) = build_array_type (char_type_node,
1818 build_index_type (size_int (len)));
1819 *line = build_int_cst (NULL_TREE, line_number);
1820 if (col)
1821 *col = build_int_cst (NULL_TREE, column_number);
1822 }
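/* Illustrative example, added for clarity and not part of the original
sources: for a node whose Sloc is foo.adb:10:5, *FILENAME is the string
"foo.adb" with an array type indexed from 0 to 7, *LINE is the integer
constant 10 and, if COL is non-null, *COL is 5. */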
1823
1824 /* Build a call to a function that raises an exception and passes file name
1825 and line number, if requested. MSG says which exception function to call.
1826 GNAT_NODE is the node conveying the source location for which the error
1827 should be signaled, or Empty in which case the error is signaled for the
1828 current location. KIND says which kind of exception node this is for,
1829 among N_Raise_{Constraint,Storage,Program}_Error. */
1830
1831 tree
1832 build_call_raise (int msg, Node_Id gnat_node, char kind)
1833 {
1834 tree fndecl = gnat_raise_decls[msg];
1835 tree label = get_exception_label (kind);
1836 tree filename, line;
1837
1838 /* If this is to be done as a goto, handle that case. */
1839 if (label)
1840 return build_goto_raise (label, msg);
1841
1842 expand_sloc (gnat_node, &filename, &line, NULL);
1843
1844 return
1845 build_call_n_expr (fndecl, 2,
1846 build1 (ADDR_EXPR,
1847 build_pointer_type (char_type_node),
1848 filename),
1849 line);
1850 }
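/* Illustrative example, added for clarity and not part of the original
sources: for a raise at foo.adb:10, the result is the unfolded call
FNDECL (&"foo.adb", 10), where FNDECL is the runtime routine selected
by MSG in gnat_raise_decls. */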
1851
1852 /* Similar to build_call_raise, with extra information about the column
1853 where the check failed. */
1854
1855 tree
1856 build_call_raise_column (int msg, Node_Id gnat_node, char kind)
1857 {
1858 tree fndecl = gnat_raise_decls_ext[msg];
1859 tree label = get_exception_label (kind);
1860 tree filename, line, col;
1861
1862 /* If this is to be done as a goto, handle that case. */
1863 if (label)
1864 return build_goto_raise (label, msg);
1865
1866 expand_sloc (gnat_node, &filename, &line, &col);
1867
1868 return
1869 build_call_n_expr (fndecl, 3,
1870 build1 (ADDR_EXPR,
1871 build_pointer_type (char_type_node),
1872 filename),
1873 line, col);
1874 }
1875
1876 /* Similar to build_call_raise_column, for an index or range check exception,
1877 with extra information of the form "INDEX out of range FIRST..LAST". */
1878
1879 tree
1880 build_call_raise_range (int msg, Node_Id gnat_node, char kind,
1881 tree index, tree first, tree last)
1882 {
1883 tree fndecl = gnat_raise_decls_ext[msg];
1884 tree label = get_exception_label (kind);
1885 tree filename, line, col;
1886
1887 /* If this is to be done as a goto, handle that case. */
1888 if (label)
1889 return build_goto_raise (label, msg);
1890
1891 expand_sloc (gnat_node, &filename, &line, &col);
1892
1893 return
1894 build_call_n_expr (fndecl, 6,
1895 build1 (ADDR_EXPR,
1896 build_pointer_type (char_type_node),
1897 filename),
1898 line, col,
1899 convert (integer_type_node, index),
1900 convert (integer_type_node, first),
1901 convert (integer_type_node, last));
1902 }
1903 \f
1904 /* qsort comparer for the bit positions of two constructor elements
1905 for record components. */
1906
1907 static int
1908 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1909 {
1910 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1911 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1912 const_tree const field1 = elmt1->index;
1913 const_tree const field2 = elmt2->index;
1914 const int ret
1915 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1916
1917 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1918 }
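/* Note added for clarity, not part of the original sources: the DECL_UID
tie-break presumably keeps the ordering deterministic when two fields,
e.g. zero-sized ones, share the same bit position, since qsort itself is
not a stable sort. */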
1919
1920 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1921
1922 tree
1923 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1924 {
1925 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1926 bool read_only = true;
1927 bool side_effects = false;
1928 tree result, obj, val;
1929 unsigned int n_elmts;
1930
1931 /* Scan the elements to see if they are all constant or if any has side
1932 effects, to let us set global flags on the resulting constructor. Count
1933 the elements along the way for possible sorting purposes below. */
1934 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1935 {
1936 /* The predicate must be in keeping with output_constructor. */
1937 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1938 || (TREE_CODE (type) == RECORD_TYPE
1939 && CONSTRUCTOR_BITFIELD_P (obj)
1940 && !initializer_constant_valid_for_bitfield_p (val))
1941 || !initializer_constant_valid_p (val,
1942 TREE_TYPE (val),
1943 TYPE_REVERSE_STORAGE_ORDER (type)))
1944 allconstant = false;
1945
1946 if (!TREE_READONLY (val))
1947 read_only = false;
1948
1949 if (TREE_SIDE_EFFECTS (val))
1950 side_effects = true;
1951 }
1952
1953 /* For record types with constant components only, sort field list
1954 by increasing bit position. This is necessary to ensure the
1955 constructor can be output as static data. */
1956 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1957 v->qsort (compare_elmt_bitpos);
1958
1959 result = build_constructor (type, v);
1960 CONSTRUCTOR_NO_CLEARING (result) = 1;
1961 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1962 TREE_SIDE_EFFECTS (result) = side_effects;
1963 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1964 return result;
1965 }
1966 \f
1967 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
1968 is not found in the record. Don't fold the result if NO_FOLD is true. */
1969
1970 static tree
1971 build_simple_component_ref (tree record, tree field, bool no_fold)
1972 {
1973 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
1974 tree ref;
1975
1976 gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
1977
1978 /* Try to fold a conversion from another record or union type unless the type
1979 contains a placeholder as it might be needed for a later substitution. */
1980 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
1981 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
1982 && !type_contains_placeholder_p (type))
1983 {
1984 tree op = TREE_OPERAND (record, 0);
1985
1986 /* If this is an unpadding operation, convert the underlying object to
1987 the unpadded type directly. */
1988 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
1989 return convert (TREE_TYPE (field), op);
1990
1991 /* Otherwise try to access FIELD directly in the underlying type, but
1992 make sure that the form of the reference doesn't change too much;
1993 this can happen for an unconstrained bit-packed array type whose
1994 constrained form can be an integer type. */
1995 ref = build_simple_component_ref (op, field, no_fold);
1996 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
1997 return ref;
1998 }
1999
2000 /* If this field is not in the specified record, see if we can find a field
2001 in the specified record whose original field is the same as this one. */
2002 if (DECL_CONTEXT (field) != type)
2003 {
2004 tree new_field;
2005
2006 /* First loop through normal components. */
2007 for (new_field = TYPE_FIELDS (type);
2008 new_field;
2009 new_field = DECL_CHAIN (new_field))
2010 if (SAME_FIELD_P (field, new_field))
2011 break;
2012
2013 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2014 component in the first search. Doing this search in two steps is
2015 required to avoid hidden homonymous fields in the _Parent field. */
2016 if (!new_field)
2017 for (new_field = TYPE_FIELDS (type);
2018 new_field;
2019 new_field = DECL_CHAIN (new_field))
2020 if (DECL_INTERNAL_P (new_field)
2021 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
2022 {
2023 tree field_ref
2024 = build_simple_component_ref (record, new_field, no_fold);
2025 ref = build_simple_component_ref (field_ref, field, no_fold);
2026 if (ref)
2027 return ref;
2028 }
2029
2030 field = new_field;
2031 }
2032
2033 if (!field)
2034 return NULL_TREE;
2035
2036 /* If the field's offset has overflowed, do not try to access it, as doing
2037 so may trigger sanity checks deeper in the back-end. Note that we don't
2038 need to warn since this will be done on trying to declare the object. */
2039 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2040 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2041 return NULL_TREE;
2042
2043 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2044
2045 if (TREE_READONLY (record)
2046 || TREE_READONLY (field)
2047 || TYPE_READONLY (type))
2048 TREE_READONLY (ref) = 1;
2049
2050 if (TREE_THIS_VOLATILE (record)
2051 || TREE_THIS_VOLATILE (field)
2052 || TYPE_VOLATILE (type))
2053 TREE_THIS_VOLATILE (ref) = 1;
2054
2055 if (no_fold)
2056 return ref;
2057
2058 /* The generic folder may punt in this case because the inner array type
2059 can be self-referential, but folding is in fact not problematic. */
2060 if (TREE_CODE (record) == CONSTRUCTOR
2061 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2062 {
2063 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2064 unsigned HOST_WIDE_INT idx;
2065 tree index, value;
2066 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2067 if (index == field)
2068 return value;
2069 return ref;
2070 }
2071
2072 return fold (ref);
2073 }
2074
2075 /* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
2076 field is not found in the record. */
2077
2078 tree
2079 build_component_ref (tree record, tree field, bool no_fold)
2080 {
2081 tree ref = build_simple_component_ref (record, field, no_fold);
2082 if (ref)
2083 return ref;
2084
2085 /* Assume this is an invalid user field so raise Constraint_Error. */
2086 return build1 (NULL_EXPR, TREE_TYPE (field),
2087 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2088 N_Raise_Constraint_Error));
2089 }
2090 \f
2091 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2092 identically. Process the case where a GNAT_PROC to call is provided. */
2093
2094 static inline tree
2095 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2096 Entity_Id gnat_proc, Entity_Id gnat_pool)
2097 {
2098 tree gnu_proc = gnat_to_gnu (gnat_proc);
2099 tree gnu_call;
2100
2101 /* A storage pool's underlying type is a record type (for both predefined
2102 storage pools and GNAT simple storage pools). The secondary stack uses
2103 the same mechanism, but its pool object (SS_Pool) is an integer. */
2104 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2105 {
2106 /* The size is the third parameter; the alignment parameter
2107 has the same type. */
2108 Entity_Id gnat_size_type
2109 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2110 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2111
2112 tree gnu_pool = gnat_to_gnu (gnat_pool);
2113 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2114 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2115
2116 gnu_size = convert (gnu_size_type, gnu_size);
2117 gnu_align = convert (gnu_size_type, gnu_align);
2118
2119 /* The first arg is always the address of the storage pool; next
2120 comes the address of the object, for a deallocator, then the
2121 size and alignment. */
2122 if (gnu_obj)
2123 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2124 gnu_size, gnu_align);
2125 else
2126 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2127 gnu_size, gnu_align);
2128 }
2129
2130 /* Secondary stack case. */
2131 else
2132 {
2133 /* The size is the second parameter. */
2134 Entity_Id gnat_size_type
2135 = Etype (Next_Formal (First_Formal (gnat_proc)));
2136 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2137
2138 gnu_size = convert (gnu_size_type, gnu_size);
2139
2140 /* For a deallocator, the first arg is the address of the object,
2141 then comes the size; for an allocator, the size is the only arg. */
2142 if (gnu_obj)
2143 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2144 else
2145 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2146 }
2147
2148 return gnu_call;
2149 }
2150
2151 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2152 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2153 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2154 latter offers. */
2155
2156 static inline tree
2157 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2158 {
2159 /* When the DATA_TYPE alignment is stricter than what malloc offers
2160 (super-aligned case), we allocate an "aligning" wrapper type and return
2161 the address of its single data field with the malloc's return value
2162 stored just in front. */
2163
2164 unsigned int data_align = TYPE_ALIGN (data_type);
2165 unsigned int system_allocator_alignment
2166 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2167
2168 tree aligning_type
2169 = ((data_align > system_allocator_alignment)
2170 ? make_aligning_type (data_type, data_align, data_size,
2171 system_allocator_alignment,
2172 POINTER_SIZE / BITS_PER_UNIT,
2173 gnat_node)
2174 : NULL_TREE);
2175
2176 tree size_to_malloc
2177 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2178
2179 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2180
2181 if (aligning_type)
2182 {
2183 /* Latch malloc's return value and get a pointer to the aligning field
2184 first. */
2185 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2186
2187 tree aligning_record_addr
2188 = convert (build_pointer_type (aligning_type), storage_ptr);
2189
2190 tree aligning_record
2191 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2192
2193 tree aligning_field
2194 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2195 false);
2196
2197 tree aligning_field_addr
2198 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2199
2200 /* Then arrange to store the allocator's return value ahead
2201 and return. */
2202 tree storage_ptr_slot_addr
2203 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2204 convert (ptr_type_node, aligning_field_addr),
2205 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2206 / BITS_PER_UNIT));
2207
2208 tree storage_ptr_slot
2209 = build_unary_op (INDIRECT_REF, NULL_TREE,
2210 convert (build_pointer_type (ptr_type_node),
2211 storage_ptr_slot_addr));
2212
2213 return
2214 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2215 build_binary_op (INIT_EXPR, NULL_TREE,
2216 storage_ptr_slot, storage_ptr),
2217 aligning_field_addr);
2218 }
2219 else
2220 return malloc_ptr;
2221 }
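/* Illustrative layout for the super-aligned case, added for clarity and
   not part of the original sources:

     malloc result  ->  +--------------------------------+
                        |  padding                       |
                        |  saved malloc result           |  <- one pointer-sized slot
     returned value ->  |  data field, suitably aligned  |
                        +--------------------------------+

   The saved pointer sits one pointer-sized slot in front of the returned
   address, which is where maybe_wrap_free below fetches it from in order
   to release the whole block. */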
2222
2223 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2224 designated by DATA_PTR using the __gnat_free entry point. */
2225
2226 static inline tree
2227 maybe_wrap_free (tree data_ptr, tree data_type)
2228 {
2229 /* In the regular alignment case, we pass the data pointer straight to free.
2230 In the super-aligned case, we need to retrieve the initial allocator
2231 return value, stored in front of the data block at allocation time. */
2232
2233 unsigned int data_align = TYPE_ALIGN (data_type);
2234 unsigned int system_allocator_alignment
2235 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2236
2237 tree free_ptr;
2238
2239 if (data_align > system_allocator_alignment)
2240 {
2241 /* DATA_FRONT_PTR (void *)
2242 = (void *)DATA_PTR - (void *)sizeof (void *)) */
2243 tree data_front_ptr
2244 = build_binary_op
2245 (POINTER_PLUS_EXPR, ptr_type_node,
2246 convert (ptr_type_node, data_ptr),
2247 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2248
2249 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2250 free_ptr
2251 = build_unary_op
2252 (INDIRECT_REF, NULL_TREE,
2253 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2254 }
2255 else
2256 free_ptr = data_ptr;
2257
2258 return build_call_n_expr (free_decl, 1, free_ptr);
2259 }
2260
2261 /* Build a GCC tree to call an allocation or deallocation function.
2262 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2263 generate an allocator.
2264
2265 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2266 object type, used to determine the to-be-honored address alignment.
2267 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2268 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2269 to provide an error location for restriction violation messages. */
2270
2271 tree
2272 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2273 Entity_Id gnat_proc, Entity_Id gnat_pool,
2274 Node_Id gnat_node)
2275 {
2276 /* Explicit proc to call? This one is assumed to deal with the type
2277 alignment constraints. */
2278 if (Present (gnat_proc))
2279 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2280 gnat_proc, gnat_pool);
2281
2282 /* Otherwise, object to "free" or "malloc" with possible special processing
2283 for alignments stricter than what the default allocator honors. */
2284 else if (gnu_obj)
2285 return maybe_wrap_free (gnu_obj, gnu_type);
2286 else
2287 {
2288 /* Assert that we no longer can be called with this special pool. */
2289 gcc_assert (gnat_pool != -1);
2290
2291 /* Check that we aren't violating the associated restriction. */
2292 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2293 {
2294 Check_No_Implicit_Heap_Alloc (gnat_node);
2295 if (Has_Task (Etype (gnat_node)))
2296 Check_No_Implicit_Task_Alloc (gnat_node);
2297 if (Has_Protected (Etype (gnat_node)))
2298 Check_No_Implicit_Protected_Alloc (gnat_node);
2299 }
2300 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2301 }
2302 }
2303 \f
2304 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2305 initial value is INIT, if INIT is nonzero. Convert the expression to
2306 RESULT_TYPE, which must be some pointer type, and return the result.
2307
2308 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2309 the storage pool to use. GNAT_NODE is used to provide an error
2310 location for restriction violation messages. If IGNORE_INIT_TYPE is
2311 true, ignore the type of INIT for the purpose of determining the size;
2312 this will cause the maximum size to be allocated if TYPE is of
2313 self-referential size. */
2314
2315 tree
2316 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2317 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2318 {
2319 tree size, storage, storage_deref, storage_init;
2320
2321 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2322 if (init && TREE_CODE (init) == NULL_EXPR)
2323 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2324
2325 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2326 else if (init && TREE_CODE (init) == COND_EXPR)
2327 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2328 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2329 gnat_proc, gnat_pool, gnat_node,
2330 ignore_init_type),
2331 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2332 gnat_proc, gnat_pool, gnat_node,
2333 ignore_init_type));
2334
2335 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2336 sizes of the object and its template. Allocate the whole thing and
2337 fill in the parts that are known. */
2338 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2339 {
2340 tree storage_type
2341 = build_unc_object_type_from_ptr (result_type, type,
2342 get_identifier ("ALLOC"), false);
2343 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2344 tree storage_ptr_type = build_pointer_type (storage_type);
2345
2346 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2347 init);
2348
2349 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2350 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2351 size = size_int (-1);
2352
2353 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2354 gnat_proc, gnat_pool, gnat_node);
2355 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2356 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2357 TREE_THIS_NOTRAP (storage_deref) = 1;
2358
2359 /* If there is an initializing expression, then make a constructor for
2360 the entire object including the bounds and copy it into the object.
2361 If there is no initializing expression, just set the bounds. */
2362 if (init)
2363 {
2364 vec<constructor_elt, va_gc> *v;
2365 vec_alloc (v, 2);
2366
2367 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2368 build_template (template_type, type, init));
2369 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2370 init);
2371 storage_init
2372 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2373 gnat_build_constructor (storage_type, v));
2374 }
2375 else
2376 storage_init
2377 = build_binary_op (INIT_EXPR, NULL_TREE,
2378 build_component_ref (storage_deref,
2379 TYPE_FIELDS (storage_type),
2380 false),
2381 build_template (template_type, type, NULL_TREE));
2382
2383 return build2 (COMPOUND_EXPR, result_type,
2384 storage_init, convert (result_type, storage));
2385 }
2386
2387 size = TYPE_SIZE_UNIT (type);
2388
2389 /* If we have an initializing expression, see if its size is simpler
2390 than the size from the type. */
2391 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2392 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2393 || CONTAINS_PLACEHOLDER_P (size)))
2394 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2395
2396 /* If the size is still self-referential, reference the initializing
2397 expression, if it is present. If not, this must have been a
2398 call to allocate a library-level object, in which case we use
2399 the maximum size. */
2400 if (CONTAINS_PLACEHOLDER_P (size))
2401 {
2402 if (!ignore_init_type && init)
2403 size = substitute_placeholder_in_expr (size, init);
2404 else
2405 size = max_size (size, true);
2406 }
2407
2408 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2409 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2410 size = size_int (-1);
2411
2412 storage = convert (result_type,
2413 build_call_alloc_dealloc (NULL_TREE, size, type,
2414 gnat_proc, gnat_pool,
2415 gnat_node));
2416
2417 /* If we have an initial value, protect the new address, assign the value
2418 and return the address with a COMPOUND_EXPR. */
2419 if (init)
2420 {
2421 storage = gnat_protect_expr (storage);
2422 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2423 TREE_THIS_NOTRAP (storage_deref) = 1;
2424 storage_init
2425 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2426 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2427 }
2428
2429 return storage;
2430 }
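/* Illustrative sketch, added for clarity and not part of the original
   sources, of the tree built above for a fat or thin pointer RESULT_TYPE
   with an initializer:

     COMPOUND_EXPR (*STORAGE = {template, INIT},
                    (RESULT_TYPE) STORAGE)

   where STORAGE is the protected result of the allocation call and the
   constructor fills in both the bounds template and the data. */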
2431 \f
2432 /* Indicate that we need to take the address of T and that it therefore
2433 should not be allocated in a register. Return true if successful. */
2434
2435 bool
2436 gnat_mark_addressable (tree t)
2437 {
2438 while (true)
2439 switch (TREE_CODE (t))
2440 {
2441 case ADDR_EXPR:
2442 case COMPONENT_REF:
2443 case ARRAY_REF:
2444 case ARRAY_RANGE_REF:
2445 case REALPART_EXPR:
2446 case IMAGPART_EXPR:
2447 case VIEW_CONVERT_EXPR:
2448 case NON_LVALUE_EXPR:
2449 CASE_CONVERT:
2450 t = TREE_OPERAND (t, 0);
2451 break;
2452
2453 case COMPOUND_EXPR:
2454 t = TREE_OPERAND (t, 1);
2455 break;
2456
2457 case CONSTRUCTOR:
2458 TREE_ADDRESSABLE (t) = 1;
2459 return true;
2460
2461 case VAR_DECL:
2462 case PARM_DECL:
2463 case RESULT_DECL:
2464 TREE_ADDRESSABLE (t) = 1;
2465 return true;
2466
2467 case FUNCTION_DECL:
2468 TREE_ADDRESSABLE (t) = 1;
2469 return true;
2470
2471 case CONST_DECL:
2472 return DECL_CONST_CORRESPONDING_VAR (t)
2473 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2474
2475 default:
2476 return true;
2477 }
2478 }
2479 \f
2480 /* Return true if EXP is a stable expression for the purpose of the functions
2481 below and, therefore, can be returned unmodified by them. We accept things
2482 that are actual constants or that have already been handled. */
2483
2484 static bool
2485 gnat_stable_expr_p (tree exp)
2486 {
2487 enum tree_code code = TREE_CODE (exp);
2488 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2489 }
2490
2491 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2492 but we know how to handle our own nodes. */
2493
2494 tree
2495 gnat_save_expr (tree exp)
2496 {
2497 tree type = TREE_TYPE (exp);
2498 enum tree_code code = TREE_CODE (exp);
2499
2500 if (gnat_stable_expr_p (exp))
2501 return exp;
2502
2503 if (code == UNCONSTRAINED_ARRAY_REF)
2504 {
2505 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2506 TREE_READONLY (t) = TYPE_READONLY (type);
2507 return t;
2508 }
2509
2510 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2511 This may be more efficient, but will also allow us to more easily find
2512 the match for the PLACEHOLDER_EXPR. */
2513 if (code == COMPONENT_REF
2514 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2515 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2516 TREE_OPERAND (exp, 1), NULL_TREE);
2517
2518 return save_expr (exp);
2519 }
2520
2521 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2522 is optimized under the assumption that EXP's value doesn't change before
2523 its subsequent reuse(s) except through its potential reevaluation. */
2524
2525 tree
2526 gnat_protect_expr (tree exp)
2527 {
2528 tree type = TREE_TYPE (exp);
2529 enum tree_code code = TREE_CODE (exp);
2530
2531 if (gnat_stable_expr_p (exp))
2532 return exp;
2533
2534 /* If EXP has no side effects, we theoretically don't need to do anything.
2535 However, we may be recursively passed more and more complex expressions
2536 involving checks which will be reused multiple times and eventually be
2537 unshared for gimplification; in order to avoid a complexity explosion
2538 at that point, we protect any expressions more complex than a simple
2539 arithmetic expression. */
2540 if (!TREE_SIDE_EFFECTS (exp))
2541 {
2542 tree inner = skip_simple_arithmetic (exp);
2543 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2544 return exp;
2545 }
2546
2547 /* If this is a conversion, protect what's inside the conversion. */
2548 if (code == NON_LVALUE_EXPR
2549 || CONVERT_EXPR_CODE_P (code)
2550 || code == VIEW_CONVERT_EXPR)
2551 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2552
2553 /* If we're indirectly referencing something, we only need to protect the
2554 address since the data itself can't change in these situations. */
2555 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2556 {
2557 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2558 TREE_READONLY (t) = TYPE_READONLY (type);
2559 return t;
2560 }
2561
2562 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2563 This may be more efficient, but will also allow us to more easily find
2564 the match for the PLACEHOLDER_EXPR. */
2565 if (code == COMPONENT_REF
2566 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2567 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2568 TREE_OPERAND (exp, 1), NULL_TREE);
2569
2570 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
2571 for a CALL_EXPR as large objects are returned via invisible reference
2572 in most ABIs so the temporary will directly be filled by the callee. */
2573 if (TYPE_IS_FAT_POINTER_P (type)
2574 || !AGGREGATE_TYPE_P (type)
2575 || code == CALL_EXPR)
2576 return save_expr (exp);
2577
2578 /* Otherwise, take a reference: protect the address and dereference it. */
2579 return
2580 build_unary_op (INDIRECT_REF, type,
2581 save_expr (build_unary_op (ADDR_EXPR,
2582 build_reference_type (type),
2583 exp)));
2584 }
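/* Note added for clarity, not part of the original sources: in the last
case above the protected expression has the form *SAVE_EXPR <&EXP>, so
each reuse rereads the object through a stabilized address instead of
copying the aggregate into a temporary. */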
2585
2586 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2587 argument to force evaluation of everything. */
2588
2589 static tree
2590 gnat_stabilize_reference_1 (tree e, void *data)
2591 {
2592 const bool force = *(bool *)data;
2593 enum tree_code code = TREE_CODE (e);
2594 tree type = TREE_TYPE (e);
2595 tree result;
2596
2597 if (gnat_stable_expr_p (e))
2598 return e;
2599
2600 switch (TREE_CODE_CLASS (code))
2601 {
2602 case tcc_exceptional:
2603 case tcc_declaration:
2604 case tcc_comparison:
2605 case tcc_expression:
2606 case tcc_reference:
2607 case tcc_vl_exp:
2608 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2609 fat pointer. This may be more efficient, but will also allow
2610 us to more easily find the match for the PLACEHOLDER_EXPR. */
2611 if (code == COMPONENT_REF
2612 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2613 result
2614 = build3 (code, type,
2615 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2616 TREE_OPERAND (e, 1), NULL_TREE);
2617 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2618 so that it will only be evaluated once. */
2619 /* The tcc_reference and tcc_comparison classes could be handled as
2620 below, but it is generally faster to only evaluate them once. */
2621 else if (TREE_SIDE_EFFECTS (e) || force)
2622 return save_expr (e);
2623 else
2624 return e;
2625 break;
2626
2627 case tcc_binary:
2628 /* Recursively stabilize each operand. */
2629 result
2630 = build2 (code, type,
2631 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2632 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2633 break;
2634
2635 case tcc_unary:
2636 /* Recursively stabilize each operand. */
2637 result
2638 = build1 (code, type,
2639 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2640 break;
2641
2642 default:
2643 gcc_unreachable ();
2644 }
2645
2646 TREE_READONLY (result) = TREE_READONLY (e);
2647 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2648 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2649
2650 return result;
2651 }
2652
2653 /* This is equivalent to stabilize_reference in tree.c but we know how to
2654 handle our own nodes and we take extra arguments. FORCE says whether to
2655 force evaluation of everything in REF. INIT is set to the first arm of
2656 a COMPOUND_EXPR present in REF, if any. */
2657
2658 tree
2659 gnat_stabilize_reference (tree ref, bool force, tree *init)
2660 {
2661 return
2662 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2663 }
2664
2665 /* Rewrite reference REF and call FUNC on each expression within REF in the
2666 process. DATA is passed unmodified to FUNC. INIT is set to the first
2667 arm of a COMPOUND_EXPR present in REF, if any. */
2668
2669 tree
2670 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2671 {
2672 tree type = TREE_TYPE (ref);
2673 enum tree_code code = TREE_CODE (ref);
2674 tree result;
2675
2676 switch (code)
2677 {
2678 case CONST_DECL:
2679 case VAR_DECL:
2680 case PARM_DECL:
2681 case RESULT_DECL:
2682 /* No action is needed in this case. */
2683 return ref;
2684
2685 CASE_CONVERT:
2686 case FLOAT_EXPR:
2687 case FIX_TRUNC_EXPR:
2688 case REALPART_EXPR:
2689 case IMAGPART_EXPR:
2690 case VIEW_CONVERT_EXPR:
2691 result
2692 = build1 (code, type,
2693 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2694 init));
2695 break;
2696
2697 case INDIRECT_REF:
2698 case UNCONSTRAINED_ARRAY_REF:
2699 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2700 break;
2701
2702 case COMPONENT_REF:
2703 result = build3 (COMPONENT_REF, type,
2704 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2705 data, init),
2706 TREE_OPERAND (ref, 1), NULL_TREE);
2707 break;
2708
2709 case BIT_FIELD_REF:
2710 result = build3 (BIT_FIELD_REF, type,
2711 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2712 data, init),
2713 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2714 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
2715 break;
2716
2717 case ARRAY_REF:
2718 case ARRAY_RANGE_REF:
2719 result
2720 = build4 (code, type,
2721 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2722 init),
2723 func (TREE_OPERAND (ref, 1), data),
2724 TREE_OPERAND (ref, 2), NULL_TREE);
2725 break;
2726
2727 case COMPOUND_EXPR:
2728 gcc_assert (!*init);
2729 *init = TREE_OPERAND (ref, 0);
2730 /* We expect only the pattern built in Call_to_gnu. */
2731 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2732 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2733 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2734 return TREE_OPERAND (ref, 1);
2735
2736 case CALL_EXPR:
2737 {
2738 /* This can only be an atomic load. */
2739 gcc_assert (call_is_atomic_load (ref));
2740
2741 /* An atomic load is an INDIRECT_REF of its first argument. */
2742 tree t = CALL_EXPR_ARG (ref, 0);
2743 if (TREE_CODE (t) == NOP_EXPR)
2744 t = TREE_OPERAND (t, 0);
2745 if (TREE_CODE (t) == ADDR_EXPR)
2746 t = build1 (ADDR_EXPR, TREE_TYPE (t),
2747 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2748 init));
2749 else
2750 t = func (t, data);
2751 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2752
2753 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2754 t, CALL_EXPR_ARG (ref, 1));
2755 }
2756 break;
2757
2758 case ERROR_MARK:
2759 case NULL_EXPR:
2760 return ref;
2761
2762 default:
2763 gcc_unreachable ();
2764 }
2765
2766 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2767 may not be sustained across some paths, such as the way via build1 for
2768 INDIRECT_REF. We reset those flags here in the general case, which is
2769 consistent with the GCC version of this routine.
2770
2771 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2772 paths introduce side-effects where there was none initially (e.g. if a
2773 SAVE_EXPR is built) and we also want to keep track of that. */
2774 TREE_READONLY (result) = TREE_READONLY (ref);
2775 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2776 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2777
2778 if (code == INDIRECT_REF
2779 || code == UNCONSTRAINED_ARRAY_REF
2780 || code == ARRAY_REF
2781 || code == ARRAY_RANGE_REF)
2782 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2783
2784 return result;
2785 }
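/* Illustrative example, added for clarity and not part of the original
sources: rewriting COMPONENT_REF <INDIRECT_REF <p>, f> invokes FUNC on
p only and rebuilds the INDIRECT_REF and COMPONENT_REF around the
result, leaving the field and any constant operands untouched. */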
2786
2787 /* This is equivalent to get_inner_reference in expr.c but it returns the
2788 ultimate containing object only if the reference (lvalue) is constant,
2789 i.e. if it doesn't depend on the context in which it is evaluated. */
2790
2791 tree
2792 get_inner_constant_reference (tree exp)
2793 {
2794 while (true)
2795 {
2796 switch (TREE_CODE (exp))
2797 {
2798 case BIT_FIELD_REF:
2799 break;
2800
2801 case COMPONENT_REF:
2802 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2803 return NULL_TREE;
2804 break;
2805
2806 case ARRAY_REF:
2807 case ARRAY_RANGE_REF:
2808 {
2809 if (TREE_OPERAND (exp, 2))
2810 return NULL_TREE;
2811
2812 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2813 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2814 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2815 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2816 return NULL_TREE;
2817 }
2818 break;
2819
2820 case REALPART_EXPR:
2821 case IMAGPART_EXPR:
2822 case VIEW_CONVERT_EXPR:
2823 break;
2824
2825 default:
2826 goto done;
2827 }
2828
2829 exp = TREE_OPERAND (exp, 0);
2830 }
2831
2832 done:
2833 return exp;
2834 }
2835
2836 /* Return true if EXPR is the addition or the subtraction of a constant and,
2837 if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
2838 if this is a subtraction. */
2839
2840 bool
2841 is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
2842 {
2843 /* Skip overflow checks. */
2844 if (TREE_CODE (expr) == COND_EXPR
2845 && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
2846 && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
2847 && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
2848 == gnat_raise_decls[CE_Overflow_Check_Failed])
2849 expr = COND_EXPR_ELSE (expr);
2850
2851 if (TREE_CODE (expr) == PLUS_EXPR)
2852 {
2853 if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
2854 {
2855 *add = TREE_OPERAND (expr, 1);
2856 *cst = TREE_OPERAND (expr, 0);
2857 *minus_p = false;
2858 return true;
2859 }
2860 else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2861 {
2862 *add = TREE_OPERAND (expr, 0);
2863 *cst = TREE_OPERAND (expr, 1);
2864 *minus_p = false;
2865 return true;
2866 }
2867 }
2868 else if (TREE_CODE (expr) == MINUS_EXPR)
2869 {
2870 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2871 {
2872 *add = TREE_OPERAND (expr, 0);
2873 *cst = TREE_OPERAND (expr, 1);
2874 *minus_p = true;
2875 return true;
2876 }
2877 }
2878
2879 return false;
2880 }
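/* Illustrative example, added for clarity and not part of the original
sources: for X + 3 this sets *ADD to X, *CST to 3 and *MINUS_P to
false; for X - 3 it sets *MINUS_P to true; 3 - X is not recognized
because only the second operand of a MINUS_EXPR may be the constant. */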
2881
2882 /* If EXPR is an expression that is invariant in the current function, in the
2883 sense that it can be evaluated anywhere in the function and any number of
2884 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2885
2886 tree
2887 gnat_invariant_expr (tree expr)
2888 {
2889 const tree type = TREE_TYPE (expr);
2890 tree add, cst;
2891 bool minus_p;
2892
2893 expr = remove_conversions (expr, false);
2894
2895 /* Look through temporaries created to capture values. */
2896 while ((TREE_CODE (expr) == CONST_DECL
2897 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2898 && decl_function_context (expr) == current_function_decl
2899 && DECL_INITIAL (expr))
2900 {
2901 expr = DECL_INITIAL (expr);
2902 /* Look into CONSTRUCTORs built to initialize padded types. */
2903 if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2904 expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2905 expr = remove_conversions (expr, false);
2906 }
2907
2908 /* We are only interested in scalar types at the moment and, even if we may
2909 have gone through padding types in the above loop, we must be back to a
2910 scalar value at this point. */
2911 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2912 return NULL_TREE;
2913
2914 if (TREE_CONSTANT (expr))
2915 return fold_convert (type, expr);
2916
2917 /* Deal with addition or subtraction of constants. */
2918 if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
2919 {
2920 add = gnat_invariant_expr (add);
2921 if (add)
2922 return
2923 fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
2924 fold_convert (type, add), fold_convert (type, cst));
2925 else
2926 return NULL_TREE;
2927 }
2928
2929 bool invariant_p = false;
2930 tree t = expr;
2931
2932 while (true)
2933 {
2934 switch (TREE_CODE (t))
2935 {
2936 case COMPONENT_REF:
2937 invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
2938 break;
2939
2940 case ARRAY_REF:
2941 case ARRAY_RANGE_REF:
2942 if (!TREE_CONSTANT (TREE_OPERAND (t, 1)) || TREE_OPERAND (t, 2))
2943 return NULL_TREE;
2944 break;
2945
2946 case BIT_FIELD_REF:
2947 case REALPART_EXPR:
2948 case IMAGPART_EXPR:
2949 case VIEW_CONVERT_EXPR:
2950 CASE_CONVERT:
2951 break;
2952
2953 case INDIRECT_REF:
2954 if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
2955 return NULL_TREE;
2956 invariant_p = false;
2957 break;
2958
2959 default:
2960 goto object;
2961 }
2962
2963 t = TREE_OPERAND (t, 0);
2964 }
2965
2966 object:
2967 if (TREE_SIDE_EFFECTS (t))
2968 return NULL_TREE;
2969
2970 if (TREE_CODE (t) == CONST_DECL
2971 && (DECL_EXTERNAL (t)
2972 || decl_function_context (t) != current_function_decl))
2973 return fold_convert (type, expr);
2974
2975 if (!invariant_p && !TREE_READONLY (t))
2976 return NULL_TREE;
2977
2978 if (TREE_CODE (t) == PARM_DECL)
2979 return fold_convert (type, expr);
2980
2981 if (TREE_CODE (t) == VAR_DECL
2982 && (DECL_EXTERNAL (t)
2983 || decl_function_context (t) != current_function_decl))
2984 return fold_convert (type, expr);
2985
2986 return NULL_TREE;
2987 }
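/* Illustrative example, added for clarity and not part of the original
sources: a scalar expression such as OBJ.F + 3, where OBJ is a
read-only object declared outside the current function and F has
DECL_INVARIANT_P set, is returned converted to its type, whereas a
dereference of a writable local pointer yields NULL_TREE. */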