1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2015, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "vec.h"
31 #include "alias.h"
32 #include "tree.h"
33 #include "inchash.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stringpool.h"
37 #include "varasm.h"
38 #include "flags.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "tree-inline.h"
42
43 #include "ada.h"
44 #include "types.h"
45 #include "atree.h"
46 #include "elists.h"
47 #include "namet.h"
48 #include "nlists.h"
49 #include "snames.h"
50 #include "stringt.h"
51 #include "uintp.h"
52 #include "fe.h"
53 #include "sinfo.h"
54 #include "einfo.h"
55 #include "ada-tree.h"
56 #include "gigi.h"
57
58 /* Return the base type of TYPE. */
59
60 tree
61 get_base_type (tree type)
62 {
63 if (TREE_CODE (type) == RECORD_TYPE
64 && TYPE_JUSTIFIED_MODULAR_P (type))
65 type = TREE_TYPE (TYPE_FIELDS (type));
66
67 while (TREE_TYPE (type)
68 && (TREE_CODE (type) == INTEGER_TYPE
69 || TREE_CODE (type) == REAL_TYPE))
70 type = TREE_TYPE (type);
71
72 return type;
73 }
74 \f
75 /* EXP is a GCC tree representing an address. See if we can find how strictly
76 the object at this address is aligned and, if so, return the alignment of
77 the object in bits. Otherwise return 0. */
78
79 unsigned int
80 known_alignment (tree exp)
81 {
82 unsigned int this_alignment;
83 unsigned int lhs, rhs;
84
85 switch (TREE_CODE (exp))
86 {
87 CASE_CONVERT:
88 case VIEW_CONVERT_EXPR:
89 case NON_LVALUE_EXPR:
90 /* Conversions between pointers and integers don't change the alignment
91 of the underlying object. */
92 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
93 break;
94
95 case COMPOUND_EXPR:
96 /* The value of a COMPOUND_EXPR is that of its second operand. */
97 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
98 break;
99
100 case PLUS_EXPR:
101 case MINUS_EXPR:
102 /* If two addresses are added, the alignment of the result is the
103 minimum of the two alignments. */
104 lhs = known_alignment (TREE_OPERAND (exp, 0));
105 rhs = known_alignment (TREE_OPERAND (exp, 1));
106 this_alignment = MIN (lhs, rhs);
107 break;
108
109 case POINTER_PLUS_EXPR:
110 /* If this is the pattern built for aligning types, decode it. */
111 if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
112 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
113 {
114 tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
115 return
116 known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
117 }
118
119 /* If we don't know the alignment of the offset, we assume that
120 of the base. */
121 lhs = known_alignment (TREE_OPERAND (exp, 0));
122 rhs = known_alignment (TREE_OPERAND (exp, 1));
123
124 if (rhs == 0)
125 this_alignment = lhs;
126 else
127 this_alignment = MIN (lhs, rhs);
128 break;
129
130 case COND_EXPR:
131 /* If there is a choice between two values, use the smaller one. */
132 lhs = known_alignment (TREE_OPERAND (exp, 1));
133 rhs = known_alignment (TREE_OPERAND (exp, 2));
134 this_alignment = MIN (lhs, rhs);
135 break;
136
137 case INTEGER_CST:
138 {
139 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
140 /* The first part of this represents the lowest bit in the constant,
141 but it is originally in bytes, not bits. */
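       /* Hedged illustration (not in the original source): a byte offset of
          24 has lowest set bit 24 & -24 == 8, so the derived alignment is
          8 * BITS_PER_UNIT, i.e. 64 bits on the usual 8-bit-byte targets.  */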
142 this_alignment = (c & -c) * BITS_PER_UNIT;
143 }
144 break;
145
146 case MULT_EXPR:
147 /* If we know the alignment of just one side, use it. Otherwise,
148 use the product of the alignments. */
149 lhs = known_alignment (TREE_OPERAND (exp, 0));
150 rhs = known_alignment (TREE_OPERAND (exp, 1));
151
152 if (lhs == 0)
153 this_alignment = rhs;
154 else if (rhs == 0)
155 this_alignment = lhs;
156 else
157 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
158 break;
159
160 case BIT_AND_EXPR:
161 /* A bit-and expression is as aligned as the maximum alignment of the
162 operands. We typically get here for a complex lhs and a constant
163 negative power of two on the rhs to force an explicit alignment, so
164 don't bother looking at the lhs. */
165 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
166 break;
167
168 case ADDR_EXPR:
169 this_alignment = expr_align (TREE_OPERAND (exp, 0));
170 break;
171
172 case CALL_EXPR:
173 {
174 tree t = maybe_inline_call_in_expr (exp);
175 if (t)
176 return known_alignment (t);
177 }
178
179 /* ... fall through ... */
180
181 default:
182 /* For other pointer expressions, we assume that the pointed-to object
183 is at least as aligned as the pointed-to type. Beware that we can
184 have a dummy type here (e.g. a Taft Amendment type), for which the
185 alignment is meaningless and should be ignored. */
186 if (POINTER_TYPE_P (TREE_TYPE (exp))
187 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
188 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
189 else
190 this_alignment = 0;
191 break;
192 }
193
194 return this_alignment;
195 }
196 \f
197 /* We have a comparison or assignment operation on two types, T1 and T2, which
198 are either both array types or both record types. T1 is assumed to be for
199 the left hand side operand, and T2 for the right hand side. Return the
200 type that both operands should be converted to for the operation, if any.
201 Otherwise return zero. */
202
203 static tree
204 find_common_type (tree t1, tree t2)
205 {
  206   /* ??? As of today, various constructs lead here with types of different
  207      sizes even when both are constant (e.g. tagged types, packable vs regular
208 component types, padded vs unpadded types, ...). While some of these
209 would better be handled upstream (types should be made consistent before
210 calling into build_binary_op), some others are really expected and we
211 have to be careful. */
212
  213   /* If this is for an assignment, we must avoid writing more than what the
  214      target can hold; the case of tagged types is handled in build_binary_op.
  215      So we use the lhs type if it is known to be smaller, or of constant size
  216      when the rhs type is not, whatever the modes.  We also force t1 when both
  217      sizes are equal constants, to minimize occurrences of view conversions on
  218      the lhs of an assignment, except for record types with a variant part on
  219      the lhs but not on the rhs, so as to make the conversion simpler.  */
220 if (TREE_CONSTANT (TYPE_SIZE (t1))
221 && (!TREE_CONSTANT (TYPE_SIZE (t2))
222 || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
223 || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
224 && !(TREE_CODE (t1) == RECORD_TYPE
225 && TREE_CODE (t2) == RECORD_TYPE
226 && get_variant_part (t1) != NULL_TREE
227 && get_variant_part (t2) == NULL_TREE))))
228 return t1;
229
230 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
231 that we will not have any alignment problems since, if we did, the
232 non-BLKmode type could not have been used. */
233 if (TYPE_MODE (t1) != BLKmode)
234 return t1;
235
236 /* If the rhs type is of constant size, use it whatever the modes. At
237 this point it is known to be smaller, or of constant size and the
238 lhs type is not. */
239 if (TREE_CONSTANT (TYPE_SIZE (t2)))
240 return t2;
241
242 /* Otherwise, if the rhs type is non-BLKmode, use it. */
243 if (TYPE_MODE (t2) != BLKmode)
244 return t2;
245
246 /* In this case, both types have variable size and BLKmode. It's
247 probably best to leave the "type mismatch" because changing it
248 could cause a bad self-referential reference. */
249 return NULL_TREE;
250 }
251 \f
252 /* Return an expression tree representing an equality comparison of A1 and A2,
253 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
254
255 Two arrays are equal in one of two ways: (1) if both have zero length in
256 some dimension (not necessarily the same dimension) or (2) if the lengths
257 in each dimension are equal and the data is equal. We perform the length
258 tests in as efficient a manner as possible. */
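   Hedged illustration of case (1): in Ada, a String (1 .. 0) and a
   String (5 .. 3) are both null arrays and therefore compare equal whatever
   their bounds are; only case (2) needs to look at the data.  */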
259
260 static tree
261 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
262 {
263 tree result = convert (result_type, boolean_true_node);
264 tree a1_is_null = convert (result_type, boolean_false_node);
265 tree a2_is_null = convert (result_type, boolean_false_node);
266 tree t1 = TREE_TYPE (a1);
267 tree t2 = TREE_TYPE (a2);
268 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
269 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
270 bool length_zero_p = false;
271
272 /* If the operands have side-effects, they need to be evaluated only once
273 in spite of the multiple references in the comparison. */
274 if (a1_side_effects_p)
275 a1 = gnat_protect_expr (a1);
276
277 if (a2_side_effects_p)
278 a2 = gnat_protect_expr (a2);
279
280 /* Process each dimension separately and compare the lengths. If any
281 dimension has a length known to be zero, set LENGTH_ZERO_P to true
282 in order to suppress the comparison of the data at the end. */
283 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
284 {
285 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
286 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
287 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
288 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
289 tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
290 size_one_node);
291 tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
292 size_one_node);
293 tree comparison, this_a1_is_null, this_a2_is_null;
294
295 /* If the length of the first array is a constant, swap our operands
296 unless the length of the second array is the constant zero. */
297 if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
298 {
299 tree tem;
300 bool btem;
301
302 tem = a1, a1 = a2, a2 = tem;
303 tem = t1, t1 = t2, t2 = tem;
304 tem = lb1, lb1 = lb2, lb2 = tem;
305 tem = ub1, ub1 = ub2, ub2 = tem;
306 tem = length1, length1 = length2, length2 = tem;
307 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
308 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
309 a2_side_effects_p = btem;
310 }
311
312 /* If the length of the second array is the constant zero, we can just
313 use the original stored bounds for the first array and see whether
314 last < first holds. */
315 if (integer_zerop (length2))
316 {
317 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
318
319 length_zero_p = true;
320
321 ub1
322 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
323 lb1
324 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
325
326 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
327 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
328 if (EXPR_P (comparison))
329 SET_EXPR_LOCATION (comparison, loc);
330
331 this_a1_is_null = comparison;
332 this_a2_is_null = convert (result_type, boolean_true_node);
333 }
334
335 /* Otherwise, if the length is some other constant value, we know that
336 this dimension in the second array cannot be superflat, so we can
337 just use its length computed from the actual stored bounds. */
338 else if (TREE_CODE (length2) == INTEGER_CST)
339 {
340 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
341
342 ub1
343 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
344 lb1
345 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
346 /* Note that we know that UB2 and LB2 are constant and hence
347 cannot contain a PLACEHOLDER_EXPR. */
348 ub2
349 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
350 lb2
351 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
352
353 comparison
354 = fold_build2_loc (loc, EQ_EXPR, result_type,
355 build_binary_op (MINUS_EXPR, b, ub1, lb1),
356 build_binary_op (MINUS_EXPR, b, ub2, lb2));
357 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
358 if (EXPR_P (comparison))
359 SET_EXPR_LOCATION (comparison, loc);
360
361 this_a1_is_null
362 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
363
364 this_a2_is_null = convert (result_type, boolean_false_node);
365 }
366
367 /* Otherwise, compare the computed lengths. */
368 else
369 {
370 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
371 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
372
373 comparison
374 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
375
376 /* If the length expression is of the form (cond ? val : 0), assume
377 that cond is equivalent to (length != 0). That's guaranteed by
378 construction of the array types in gnat_to_gnu_entity. */
379 if (TREE_CODE (length1) == COND_EXPR
380 && integer_zerop (TREE_OPERAND (length1, 2)))
381 this_a1_is_null
382 = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
383 else
384 this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
385 length1, size_zero_node);
386
387 /* Likewise for the second array. */
388 if (TREE_CODE (length2) == COND_EXPR
389 && integer_zerop (TREE_OPERAND (length2, 2)))
390 this_a2_is_null
391 = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
392 else
393 this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
394 length2, size_zero_node);
395 }
396
397 /* Append expressions for this dimension to the final expressions. */
398 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
399 result, comparison);
400
401 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
402 this_a1_is_null, a1_is_null);
403
404 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
405 this_a2_is_null, a2_is_null);
406
407 t1 = TREE_TYPE (t1);
408 t2 = TREE_TYPE (t2);
409 }
410
411 /* Unless the length of some dimension is known to be zero, compare the
412 data in the array. */
413 if (!length_zero_p)
414 {
415 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
416 tree comparison;
417
418 if (type)
419 {
420 a1 = convert (type, a1),
421 a2 = convert (type, a2);
422 }
423
424 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
425
426 result
427 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
428 }
429
430 /* The result is also true if both sizes are zero. */
431 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
432 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
433 a1_is_null, a2_is_null),
434 result);
435
436 /* If the operands have side-effects, they need to be evaluated before
437 doing the tests above since the place they otherwise would end up
438 being evaluated at run time could be wrong. */
439 if (a1_side_effects_p)
440 result = build2 (COMPOUND_EXPR, result_type, a1, result);
441
442 if (a2_side_effects_p)
443 result = build2 (COMPOUND_EXPR, result_type, a2, result);
444
445 return result;
446 }
447
448 /* Return an expression tree representing an equality comparison of P1 and P2,
449 two objects of fat pointer type. The result should be of type RESULT_TYPE.
450
451 Two fat pointers are equal in one of two ways: (1) if both have a null
  452    pointer to the array or (2) if they contain the same pair of pointers.
453 We perform the comparison in as efficient a manner as possible. */
454
455 static tree
456 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
457 {
458 tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
459 tree p1_array_is_null, p2_array_is_null;
460
461 /* If either operand has side-effects, they have to be evaluated only once
462 in spite of the multiple references to the operand in the comparison. */
463 p1 = gnat_protect_expr (p1);
464 p2 = gnat_protect_expr (p2);
465
466 /* The constant folder doesn't fold fat pointer types so we do it here. */
467 if (TREE_CODE (p1) == CONSTRUCTOR)
468 p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
469 else
470 p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);
471
472 p1_array_is_null
473 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
474 fold_convert_loc (loc, TREE_TYPE (p1_array),
475 null_pointer_node));
476
477 if (TREE_CODE (p2) == CONSTRUCTOR)
478 p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
479 else
480 p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);
481
482 p2_array_is_null
483 = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
484 fold_convert_loc (loc, TREE_TYPE (p2_array),
485 null_pointer_node));
486
487 /* If one of the pointers to the array is null, just compare the other. */
488 if (integer_zerop (p1_array))
489 return p2_array_is_null;
490 else if (integer_zerop (p2_array))
491 return p1_array_is_null;
492
493 /* Otherwise, do the fully-fledged comparison. */
494 same_array
495 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
496
497 if (TREE_CODE (p1) == CONSTRUCTOR)
498 p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
499 else
500 p1_bounds
501 = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
502 true);
503
504 if (TREE_CODE (p2) == CONSTRUCTOR)
505 p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
506 else
507 p2_bounds
508 = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
509 true);
510
511 same_bounds
512 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
513
514 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
515 return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
516 build_binary_op (TRUTH_ORIF_EXPR, result_type,
517 p1_array_is_null, same_bounds));
518 }
519 \f
520 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
521 type TYPE. We know that TYPE is a modular type with a nonbinary
522 modulus. */
523
524 static tree
525 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
526 tree rhs)
527 {
528 tree modulus = TYPE_MODULUS (type);
529 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
530 unsigned int precision;
531 bool unsignedp = true;
532 tree op_type = type;
533 tree result;
534
535 /* If this is an addition of a constant, convert it to a subtraction
536 of a constant since we can do that faster. */
537 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
538 {
539 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
540 op_code = MINUS_EXPR;
541 }
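
   /* Hedged worked example: with a nonbinary modulus of 7, LHS + 5 becomes
      LHS - 2 because 7 - 5 == 2; the subtraction path below then adds the
      modulus back whenever the intermediate result is negative, which yields
      the same value modulo 7.  */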
542
543 /* For the logical operations, we only need PRECISION bits. For
544 addition and subtraction, we need one more and for multiplication we
545 need twice as many. But we never want to make a size smaller than
546 our size. */
547 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
548 needed_precision += 1;
549 else if (op_code == MULT_EXPR)
550 needed_precision *= 2;
551
552 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
553
554 /* Unsigned will do for everything but subtraction. */
555 if (op_code == MINUS_EXPR)
556 unsignedp = false;
557
558 /* If our type is the wrong signedness or isn't wide enough, make a new
559 type and convert both our operands to it. */
560 if (TYPE_PRECISION (op_type) < precision
561 || TYPE_UNSIGNED (op_type) != unsignedp)
562 {
563 /* Copy the node so we ensure it can be modified to make it modular. */
564 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
565 modulus = convert (op_type, modulus);
566 SET_TYPE_MODULUS (op_type, modulus);
567 TYPE_MODULAR_P (op_type) = 1;
568 lhs = convert (op_type, lhs);
569 rhs = convert (op_type, rhs);
570 }
571
572 /* Do the operation, then we'll fix it up. */
573 result = fold_build2 (op_code, op_type, lhs, rhs);
574
575 /* For multiplication, we have no choice but to do a full modulus
576 operation. However, we want to do this in the narrowest
577 possible size. */
578 if (op_code == MULT_EXPR)
579 {
580 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
581 modulus = convert (div_type, modulus);
582 SET_TYPE_MODULUS (div_type, modulus);
583 TYPE_MODULAR_P (div_type) = 1;
584 result = convert (op_type,
585 fold_build2 (TRUNC_MOD_EXPR, div_type,
586 convert (div_type, result), modulus));
587 }
588
589 /* For subtraction, add the modulus back if we are negative. */
590 else if (op_code == MINUS_EXPR)
591 {
592 result = gnat_protect_expr (result);
593 result = fold_build3 (COND_EXPR, op_type,
594 fold_build2 (LT_EXPR, boolean_type_node, result,
595 convert (op_type, integer_zero_node)),
596 fold_build2 (PLUS_EXPR, op_type, result, modulus),
597 result);
598 }
599
600 /* For the other operations, subtract the modulus if we are >= it. */
601 else
602 {
603 result = gnat_protect_expr (result);
604 result = fold_build3 (COND_EXPR, op_type,
605 fold_build2 (GE_EXPR, boolean_type_node,
606 result, modulus),
607 fold_build2 (MINUS_EXPR, op_type,
608 result, modulus),
609 result);
610 }
611
612 return convert (type, result);
613 }
614 \f
615 /* This page contains routines that implement the Ada semantics with regard
616 to atomic objects. They are fully piggybacked on the middle-end support
617 for atomic loads and stores.
618
619 *** Memory barriers and volatile objects ***
620
621 We implement the weakened form of the C.6(16) clause that was introduced
622 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
623 implementable without significant performance hits on modern platforms.
624
625 We also take advantage of the requirements imposed on shared variables by
626 9.10 (conditions for sequential actions) to have non-erroneous execution
  627    and consider that C.6(16) and C.6(17) only prescribe a uniform order of
628 volatile updates with regard to sequential actions, i.e. with regard to
629 reads or updates of atomic objects.
630
631 As such, an update of an atomic object by a task requires that all earlier
632 accesses to volatile objects have completed. Similarly, later accesses to
633 volatile objects cannot be reordered before the update of the atomic object.
634 So, memory barriers both before and after the atomic update are needed.
635
636 For a read of an atomic object, to avoid seeing writes of volatile objects
637 by a task earlier than by the other tasks, a memory barrier is needed before
638 the atomic read. Finally, to avoid reordering later reads or updates of
639 volatile objects to before the atomic read, a barrier is needed after the
640 atomic read.
641
642 So, memory barriers are needed before and after atomic reads and updates.
643 And, in order to simplify the implementation, we use full memory barriers
644 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
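
/* As a hedged sketch of what the routines below generate, an atomic read or
   update of a properly sized object that requires synchronization essentially
   maps onto the GCC atomic builtins with the strongest memory model, e.g. for
   a 4-byte object:

     val = __atomic_load_4 (addr, __ATOMIC_SEQ_CST);
     __atomic_store_4 (addr, val, __ATOMIC_SEQ_CST);

   and the sequentially consistent model provides the full barriers before
   and after the access that are described above.  */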
645
646 /* Return the size of TYPE, which must be a positive power of 2. */
647
648 static unsigned int
649 resolve_atomic_size (tree type)
650 {
651 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
652
653 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
654 return size;
655
656 /* We shouldn't reach here without having already detected that the size
657 isn't compatible with an atomic access. */
658 gcc_assert (Serious_Errors_Detected);
659
660 return 0;
661 }
662
663 /* Build an atomic load for the underlying atomic object in SRC. SYNC is
664 true if the load requires synchronization. */
665
666 tree
667 build_atomic_load (tree src, bool sync)
668 {
669 tree ptr_type
670 = build_pointer_type
671 (build_qualified_type (void_type_node,
672 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
673 tree mem_model
674 = build_int_cst (integer_type_node,
675 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
676 tree orig_src = src;
677 tree t, addr, val;
678 unsigned int size;
679 int fncode;
680
681 /* Remove conversions to get the address of the underlying object. */
682 src = remove_conversions (src, false);
683 size = resolve_atomic_size (TREE_TYPE (src));
684 if (size == 0)
685 return orig_src;
686
687 fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
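   /* Illustrative mapping, assuming the usual consecutive ordering of these
      builtins: size 1 selects BUILT_IN_ATOMIC_LOAD_1, size 4 selects
      BUILT_IN_ATOMIC_LOAD_4 and size 16 selects BUILT_IN_ATOMIC_LOAD_16.  */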
688 t = builtin_decl_implicit ((enum built_in_function) fncode);
689
690 addr = build_unary_op (ADDR_EXPR, ptr_type, src);
691 val = build_call_expr (t, 2, addr, mem_model);
692
693 /* First reinterpret the loaded bits in the original type of the load,
694 then convert to the expected result type. */
695 t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
696 return convert (TREE_TYPE (orig_src), t);
697 }
698
699 /* Build an atomic store from SRC to the underlying atomic object in DEST.
700 SYNC is true if the store requires synchronization. */
701
702 tree
703 build_atomic_store (tree dest, tree src, bool sync)
704 {
705 tree ptr_type
706 = build_pointer_type
707 (build_qualified_type (void_type_node,
708 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
709 tree mem_model
710 = build_int_cst (integer_type_node,
711 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
712 tree orig_dest = dest;
713 tree t, int_type, addr;
714 unsigned int size;
715 int fncode;
716
717 /* Remove conversions to get the address of the underlying object. */
718 dest = remove_conversions (dest, false);
719 size = resolve_atomic_size (TREE_TYPE (dest));
720 if (size == 0)
721 return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
722
723 fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
724 t = builtin_decl_implicit ((enum built_in_function) fncode);
725 int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
726
727 /* First convert the bits to be stored to the original type of the store,
728 then reinterpret them in the effective type. But if the original type
729 is a padded type with the same size, convert to the inner type instead,
730 as we don't want to artificially introduce a CONSTRUCTOR here. */
731 if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
732 && TYPE_SIZE (TREE_TYPE (dest))
733 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
734 src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
735 else
736 src = convert (TREE_TYPE (dest), src);
737 src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
738 addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
739
740 return build_call_expr (t, 3, addr, src, mem_model);
741 }
742
743 /* Build a load-modify-store sequence from SRC to DEST. GNAT_NODE is used for
744 the location of the sequence. Note that, even though the load and the store
745 are both atomic, the sequence itself is not atomic. */
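
/* Hedged outline of the sequence built below, for an update of a component
   of an atomic object OBJ, in pseudo-C:

     temp = atomic_load (&obj);     -- load already generated upstream
     temp.component = src;          -- the modify built here
     atomic_store (&obj, temp);     -- store built here, unsynchronized

   TEMP is put into a register when possible so that only whole-object
   accesses touch OBJ.  */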
746
747 tree
748 build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
749 {
750 /* We will be modifying DEST below so we build a copy. */
751 dest = copy_node (dest);
752 tree ref = dest;
753
754 while (handled_component_p (ref))
755 {
756 /* The load should already have been generated during the translation
  757          of the GNAT destination tree; find it in the GNU tree.  */
758 if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
759 {
760 tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
761 if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
762 {
763 tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
764 tree t = CALL_EXPR_ARG (op, 0);
765 tree obj, temp, stmt;
766
  767              /* Find the loaded object.  */
768 if (TREE_CODE (t) == NOP_EXPR)
769 t = TREE_OPERAND (t, 0);
770 if (TREE_CODE (t) == ADDR_EXPR)
771 obj = TREE_OPERAND (t, 0);
772 else
773 obj = build1 (INDIRECT_REF, type, t);
774
775 /* Drop atomic and volatile qualifiers for the temporary. */
776 type = TYPE_MAIN_VARIANT (type);
777
778 /* And drop BLKmode, if need be, to put it into a register. */
779 if (TYPE_MODE (type) == BLKmode)
780 {
781 unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
782 type = copy_type (type);
783 SET_TYPE_MODE (type, mode_for_size (size, MODE_INT, 0));
784 }
785
786 /* Create the temporary by inserting a SAVE_EXPR. */
787 temp = build1 (SAVE_EXPR, type,
788 build1 (VIEW_CONVERT_EXPR, type, op));
789 TREE_OPERAND (ref, 0) = temp;
790
791 start_stmt_group ();
792
793 /* Build the modify of the temporary. */
794 stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
795 add_stmt_with_node (stmt, gnat_node);
796
797 /* Build the store to the object. */
798 stmt = build_atomic_store (obj, temp, false);
799 add_stmt_with_node (stmt, gnat_node);
800
801 return end_stmt_group ();
802 }
803 }
804
805 TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
806 ref = TREE_OPERAND (ref, 0);
807 }
808
809 /* Something went wrong earlier if we have not found the atomic load. */
810 gcc_unreachable ();
811 }
812 \f
813 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
814 desired for the result. Usually the operation is to be performed
815 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
816 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
817 case the type to be used will be derived from the operands.
818
819 This function is very much unlike the ones for C and C++ since we
820 have already done any type conversion and matching required. All we
821 have to do here is validate the work done by SEM and handle subtypes. */
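
/* Hedged usage sketch (caller and variable names are illustrative only):
   a caller in gigi typically does

     gnu_result = build_binary_op (PLUS_EXPR, gnu_type, gnu_lhs, gnu_rhs);

   with GNU_LHS and GNU_RHS already converted to types compatible with
   GNU_TYPE, as stated above.  */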
822
823 tree
824 build_binary_op (enum tree_code op_code, tree result_type,
825 tree left_operand, tree right_operand)
826 {
827 tree left_type = TREE_TYPE (left_operand);
828 tree right_type = TREE_TYPE (right_operand);
829 tree left_base_type = get_base_type (left_type);
830 tree right_base_type = get_base_type (right_type);
831 tree operation_type = result_type;
832 tree best_type = NULL_TREE;
833 tree modulus, result;
834 bool has_side_effects = false;
835
836 if (operation_type
837 && TREE_CODE (operation_type) == RECORD_TYPE
838 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
839 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
840
841 if (operation_type
842 && TREE_CODE (operation_type) == INTEGER_TYPE
843 && TYPE_EXTRA_SUBTYPE_P (operation_type))
844 operation_type = get_base_type (operation_type);
845
846 modulus = (operation_type
847 && TREE_CODE (operation_type) == INTEGER_TYPE
848 && TYPE_MODULAR_P (operation_type)
849 ? TYPE_MODULUS (operation_type) : NULL_TREE);
850
851 switch (op_code)
852 {
853 case INIT_EXPR:
854 case MODIFY_EXPR:
855 gcc_checking_assert (result_type == NULL_TREE);
856
857 /* If there were integral or pointer conversions on the LHS, remove
858 them; we'll be putting them back below if needed. Likewise for
859 conversions between array and record types, except for justified
860 modular types. But don't do this if the right operand is not
861 BLKmode (for packed arrays) unless we are not changing the mode. */
862 while ((CONVERT_EXPR_P (left_operand)
863 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
864 && (((INTEGRAL_TYPE_P (left_type)
865 || POINTER_TYPE_P (left_type))
866 && (INTEGRAL_TYPE_P (TREE_TYPE
867 (TREE_OPERAND (left_operand, 0)))
868 || POINTER_TYPE_P (TREE_TYPE
869 (TREE_OPERAND (left_operand, 0)))))
870 || (((TREE_CODE (left_type) == RECORD_TYPE
871 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
872 || TREE_CODE (left_type) == ARRAY_TYPE)
873 && ((TREE_CODE (TREE_TYPE
874 (TREE_OPERAND (left_operand, 0)))
875 == RECORD_TYPE)
876 || (TREE_CODE (TREE_TYPE
877 (TREE_OPERAND (left_operand, 0)))
878 == ARRAY_TYPE))
879 && (TYPE_MODE (right_type) == BLKmode
880 || (TYPE_MODE (left_type)
881 == TYPE_MODE (TREE_TYPE
882 (TREE_OPERAND
883 (left_operand, 0))))))))
884 {
885 left_operand = TREE_OPERAND (left_operand, 0);
886 left_type = TREE_TYPE (left_operand);
887 }
888
889 /* If a class-wide type may be involved, force use of the RHS type. */
890 if ((TREE_CODE (right_type) == RECORD_TYPE
891 || TREE_CODE (right_type) == UNION_TYPE)
892 && TYPE_ALIGN_OK (right_type))
893 operation_type = right_type;
894
895 /* If we are copying between padded objects with compatible types, use
  896          the padded view of the objects; this is very likely more efficient.
897 Likewise for a padded object that is assigned a constructor, if we
898 can convert the constructor to the inner type, to avoid putting a
899 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
900 actually copied anything. */
901 else if (TYPE_IS_PADDING_P (left_type)
902 && TREE_CONSTANT (TYPE_SIZE (left_type))
903 && ((TREE_CODE (right_operand) == COMPONENT_REF
904 && TYPE_MAIN_VARIANT (left_type)
905 == TYPE_MAIN_VARIANT
906 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
907 || (TREE_CODE (right_operand) == CONSTRUCTOR
908 && !CONTAINS_PLACEHOLDER_P
909 (DECL_SIZE (TYPE_FIELDS (left_type)))))
910 && !integer_zerop (TYPE_SIZE (right_type)))
911 {
912 /* We make an exception for a BLKmode type padding a non-BLKmode
913 inner type and do the conversion of the LHS right away, since
914 unchecked_convert wouldn't do it properly. */
915 if (TYPE_MODE (left_type) == BLKmode
916 && TYPE_MODE (right_type) != BLKmode
917 && TREE_CODE (right_operand) != CONSTRUCTOR)
918 {
919 operation_type = right_type;
920 left_operand = convert (operation_type, left_operand);
921 left_type = operation_type;
922 }
923 else
924 operation_type = left_type;
925 }
926
927 /* If we have a call to a function that returns with variable size, use
928 the RHS type in case we want to use the return slot optimization. */
929 else if (TREE_CODE (right_operand) == CALL_EXPR
930 && return_type_with_variable_size_p (right_type))
931 operation_type = right_type;
932
933 /* Find the best type to use for copying between aggregate types. */
934 else if (((TREE_CODE (left_type) == ARRAY_TYPE
935 && TREE_CODE (right_type) == ARRAY_TYPE)
936 || (TREE_CODE (left_type) == RECORD_TYPE
937 && TREE_CODE (right_type) == RECORD_TYPE))
938 && (best_type = find_common_type (left_type, right_type)))
939 operation_type = best_type;
940
941 /* Otherwise use the LHS type. */
942 else
943 operation_type = left_type;
944
945 /* Ensure everything on the LHS is valid. If we have a field reference,
946 strip anything that get_inner_reference can handle. Then remove any
947 conversions between types having the same code and mode. And mark
948 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
949 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node. */
950 result = left_operand;
951 while (true)
952 {
953 tree restype = TREE_TYPE (result);
954
955 if (TREE_CODE (result) == COMPONENT_REF
956 || TREE_CODE (result) == ARRAY_REF
957 || TREE_CODE (result) == ARRAY_RANGE_REF)
958 while (handled_component_p (result))
959 result = TREE_OPERAND (result, 0);
960 else if (TREE_CODE (result) == REALPART_EXPR
961 || TREE_CODE (result) == IMAGPART_EXPR
962 || (CONVERT_EXPR_P (result)
963 && (((TREE_CODE (restype)
964 == TREE_CODE (TREE_TYPE
965 (TREE_OPERAND (result, 0))))
966 && (TYPE_MODE (TREE_TYPE
967 (TREE_OPERAND (result, 0)))
968 == TYPE_MODE (restype)))
969 || TYPE_ALIGN_OK (restype))))
970 result = TREE_OPERAND (result, 0);
971 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
972 {
973 TREE_ADDRESSABLE (result) = 1;
974 result = TREE_OPERAND (result, 0);
975 }
976 else
977 break;
978 }
979
980 gcc_assert (TREE_CODE (result) == INDIRECT_REF
981 || TREE_CODE (result) == NULL_EXPR
982 || TREE_CODE (result) == SAVE_EXPR
983 || DECL_P (result));
984
  985       /* Convert the right operand to the operation type unless it is
  986          already of the correct type or the type involves a placeholder,
  987          since the RHS may not have the same record type.  */
988 if (operation_type != right_type
989 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
990 {
991 right_operand = convert (operation_type, right_operand);
992 right_type = operation_type;
993 }
994
995 /* If the left operand is not of the same type as the operation
996 type, wrap it up in a VIEW_CONVERT_EXPR. */
997 if (left_type != operation_type)
998 left_operand = unchecked_convert (operation_type, left_operand, false);
999
1000 has_side_effects = true;
1001 modulus = NULL_TREE;
1002 break;
1003
1004 case ARRAY_REF:
1005 if (!operation_type)
1006 operation_type = TREE_TYPE (left_type);
1007
1008 /* ... fall through ... */
1009
1010 case ARRAY_RANGE_REF:
1011 /* First look through conversion between type variants. Note that
1012 this changes neither the operation type nor the type domain. */
1013 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
1014 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
1015 == TYPE_MAIN_VARIANT (left_type))
1016 {
1017 left_operand = TREE_OPERAND (left_operand, 0);
1018 left_type = TREE_TYPE (left_operand);
1019 }
1020
1021 /* For a range, make sure the element type is consistent. */
1022 if (op_code == ARRAY_RANGE_REF
1023 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
1024 operation_type = build_array_type (TREE_TYPE (left_type),
1025 TYPE_DOMAIN (operation_type));
1026
1027 /* Then convert the right operand to its base type. This will prevent
1028 unneeded sign conversions when sizetype is wider than integer. */
1029 right_operand = convert (right_base_type, right_operand);
1030 right_operand = convert_to_index_type (right_operand);
1031 modulus = NULL_TREE;
1032 break;
1033
1034 case TRUTH_ANDIF_EXPR:
1035 case TRUTH_ORIF_EXPR:
1036 case TRUTH_AND_EXPR:
1037 case TRUTH_OR_EXPR:
1038 case TRUTH_XOR_EXPR:
1039 gcc_checking_assert
1040 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1041 operation_type = left_base_type;
1042 left_operand = convert (operation_type, left_operand);
1043 right_operand = convert (operation_type, right_operand);
1044 break;
1045
1046 case GE_EXPR:
1047 case LE_EXPR:
1048 case GT_EXPR:
1049 case LT_EXPR:
1050 case EQ_EXPR:
1051 case NE_EXPR:
1052 gcc_checking_assert
1053 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1054 /* If either operand is a NULL_EXPR, just return a new one. */
1055 if (TREE_CODE (left_operand) == NULL_EXPR)
1056 return build2 (op_code, result_type,
1057 build1 (NULL_EXPR, integer_type_node,
1058 TREE_OPERAND (left_operand, 0)),
1059 integer_zero_node);
1060
1061 else if (TREE_CODE (right_operand) == NULL_EXPR)
1062 return build2 (op_code, result_type,
1063 build1 (NULL_EXPR, integer_type_node,
1064 TREE_OPERAND (right_operand, 0)),
1065 integer_zero_node);
1066
 1067       /* If either object is a justified modular type, get the
 1068          fields from within.  */
1069 if (TREE_CODE (left_type) == RECORD_TYPE
1070 && TYPE_JUSTIFIED_MODULAR_P (left_type))
1071 {
1072 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
1073 left_operand);
1074 left_type = TREE_TYPE (left_operand);
1075 left_base_type = get_base_type (left_type);
1076 }
1077
1078 if (TREE_CODE (right_type) == RECORD_TYPE
1079 && TYPE_JUSTIFIED_MODULAR_P (right_type))
1080 {
1081 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
1082 right_operand);
1083 right_type = TREE_TYPE (right_operand);
1084 right_base_type = get_base_type (right_type);
1085 }
1086
1087 /* If both objects are arrays, compare them specially. */
1088 if ((TREE_CODE (left_type) == ARRAY_TYPE
1089 || (TREE_CODE (left_type) == INTEGER_TYPE
1090 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
1091 && (TREE_CODE (right_type) == ARRAY_TYPE
1092 || (TREE_CODE (right_type) == INTEGER_TYPE
1093 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1094 {
1095 result = compare_arrays (input_location,
1096 result_type, left_operand, right_operand);
1097 if (op_code == NE_EXPR)
1098 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1099 else
1100 gcc_assert (op_code == EQ_EXPR);
1101
1102 return result;
1103 }
1104
1105 /* Otherwise, the base types must be the same, unless they are both fat
1106 pointer types or record types. In the latter case, use the best type
1107 and convert both operands to that type. */
1108 if (left_base_type != right_base_type)
1109 {
1110 if (TYPE_IS_FAT_POINTER_P (left_base_type)
1111 && TYPE_IS_FAT_POINTER_P (right_base_type))
1112 {
1113 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1114 == TYPE_MAIN_VARIANT (right_base_type));
1115 best_type = left_base_type;
1116 }
1117
1118 else if (TREE_CODE (left_base_type) == RECORD_TYPE
1119 && TREE_CODE (right_base_type) == RECORD_TYPE)
1120 {
1121 /* The only way this is permitted is if both types have the same
1122 name. In that case, one of them must not be self-referential.
1123 Use it as the best type. Even better with a fixed size. */
1124 gcc_assert (TYPE_NAME (left_base_type)
1125 && TYPE_NAME (left_base_type)
1126 == TYPE_NAME (right_base_type));
1127
1128 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1129 best_type = left_base_type;
1130 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1131 best_type = right_base_type;
1132 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1133 best_type = left_base_type;
1134 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1135 best_type = right_base_type;
1136 else
1137 gcc_unreachable ();
1138 }
1139
1140 else if (POINTER_TYPE_P (left_base_type)
1141 && POINTER_TYPE_P (right_base_type))
1142 {
1143 gcc_assert (TREE_TYPE (left_base_type)
1144 == TREE_TYPE (right_base_type));
1145 best_type = left_base_type;
1146 }
1147 else
1148 gcc_unreachable ();
1149
1150 left_operand = convert (best_type, left_operand);
1151 right_operand = convert (best_type, right_operand);
1152 }
1153 else
1154 {
1155 left_operand = convert (left_base_type, left_operand);
1156 right_operand = convert (right_base_type, right_operand);
1157 }
1158
1159 /* If both objects are fat pointers, compare them specially. */
1160 if (TYPE_IS_FAT_POINTER_P (left_base_type))
1161 {
1162 result
1163 = compare_fat_pointers (input_location,
1164 result_type, left_operand, right_operand);
1165 if (op_code == NE_EXPR)
1166 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1167 else
1168 gcc_assert (op_code == EQ_EXPR);
1169
1170 return result;
1171 }
1172
1173 modulus = NULL_TREE;
1174 break;
1175
1176 case LSHIFT_EXPR:
1177 case RSHIFT_EXPR:
1178 case LROTATE_EXPR:
1179 case RROTATE_EXPR:
1180 /* The RHS of a shift can be any type. Also, ignore any modulus
1181 (we used to abort, but this is needed for unchecked conversion
1182 to modular types). Otherwise, processing is the same as normal. */
1183 gcc_assert (operation_type == left_base_type);
1184 modulus = NULL_TREE;
1185 left_operand = convert (operation_type, left_operand);
1186 break;
1187
1188 case BIT_AND_EXPR:
1189 case BIT_IOR_EXPR:
1190 case BIT_XOR_EXPR:
1191 /* For binary modulus, if the inputs are in range, so are the
1192 outputs. */
1193 if (modulus && integer_pow2p (modulus))
1194 modulus = NULL_TREE;
1195 goto common;
1196
1197 case COMPLEX_EXPR:
1198 gcc_assert (TREE_TYPE (result_type) == left_base_type
1199 && TREE_TYPE (result_type) == right_base_type);
1200 left_operand = convert (left_base_type, left_operand);
1201 right_operand = convert (right_base_type, right_operand);
1202 break;
1203
1204 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1205 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1206 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1207 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
 1208       /* These always produce results no larger than either operand.  */
1209 modulus = NULL_TREE;
1210 goto common;
1211
1212 case POINTER_PLUS_EXPR:
1213 gcc_assert (operation_type == left_base_type
1214 && sizetype == right_base_type);
1215 left_operand = convert (operation_type, left_operand);
1216 right_operand = convert (sizetype, right_operand);
1217 break;
1218
1219 case PLUS_NOMOD_EXPR:
1220 case MINUS_NOMOD_EXPR:
1221 if (op_code == PLUS_NOMOD_EXPR)
1222 op_code = PLUS_EXPR;
1223 else
1224 op_code = MINUS_EXPR;
1225 modulus = NULL_TREE;
1226
1227 /* ... fall through ... */
1228
1229 case PLUS_EXPR:
1230 case MINUS_EXPR:
 1231       /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
 1232          other compilers.  Contrary to C, Ada doesn't allow arithmetic in
 1233          these types but can generate addition/subtraction for Succ/Pred.  */
1234 if (operation_type
1235 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1236 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1237 operation_type = left_base_type = right_base_type
1238 = gnat_type_for_mode (TYPE_MODE (operation_type),
1239 TYPE_UNSIGNED (operation_type));
1240
1241 /* ... fall through ... */
1242
1243 default:
1244 common:
 1245       /* The result type should be the same as the base types of both
 1246          operands (and they should be the same).  Convert
1247 everything to the result type. */
1248
1249 gcc_assert (operation_type == left_base_type
1250 && left_base_type == right_base_type);
1251 left_operand = convert (operation_type, left_operand);
1252 right_operand = convert (operation_type, right_operand);
1253 }
1254
1255 if (modulus && !integer_pow2p (modulus))
1256 {
1257 result = nonbinary_modular_operation (op_code, operation_type,
1258 left_operand, right_operand);
1259 modulus = NULL_TREE;
1260 }
1261 /* If either operand is a NULL_EXPR, just return a new one. */
1262 else if (TREE_CODE (left_operand) == NULL_EXPR)
1263 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1264 else if (TREE_CODE (right_operand) == NULL_EXPR)
1265 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1266 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1267 result = fold (build4 (op_code, operation_type, left_operand,
1268 right_operand, NULL_TREE, NULL_TREE));
1269 else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1270 result = build2 (op_code, void_type_node, left_operand, right_operand);
1271 else
1272 result
1273 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1274
1275 if (TREE_CONSTANT (result))
1276 ;
1277 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1278 {
1279 if (TYPE_VOLATILE (operation_type))
1280 TREE_THIS_VOLATILE (result) = 1;
1281 }
1282 else
1283 TREE_CONSTANT (result)
1284 |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1285
1286 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1287
1288 /* If we are working with modular types, perform the MOD operation
1289 if something above hasn't eliminated the need for it. */
1290 if (modulus)
1291 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1292 convert (operation_type, modulus));
1293
1294 if (result_type && result_type != operation_type)
1295 result = convert (result_type, result);
1296
1297 return result;
1298 }
1299 \f
1300 /* Similar, but for unary operations. */
1301
1302 tree
1303 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1304 {
1305 tree type = TREE_TYPE (operand);
1306 tree base_type = get_base_type (type);
1307 tree operation_type = result_type;
1308 tree result;
1309
1310 if (operation_type
1311 && TREE_CODE (operation_type) == RECORD_TYPE
1312 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1313 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1314
1315 if (operation_type
1316 && TREE_CODE (operation_type) == INTEGER_TYPE
1317 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1318 operation_type = get_base_type (operation_type);
1319
1320 switch (op_code)
1321 {
1322 case REALPART_EXPR:
1323 case IMAGPART_EXPR:
1324 if (!operation_type)
1325 result_type = operation_type = TREE_TYPE (type);
1326 else
1327 gcc_assert (result_type == TREE_TYPE (type));
1328
1329 result = fold_build1 (op_code, operation_type, operand);
1330 break;
1331
1332 case TRUTH_NOT_EXPR:
1333 gcc_checking_assert
1334 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1335 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1336 /* When not optimizing, fold the result as invert_truthvalue_loc
1337 doesn't fold the result of comparisons. This is intended to undo
1338 the trick used for boolean rvalues in gnat_to_gnu. */
1339 if (!optimize)
1340 result = fold (result);
1341 break;
1342
1343 case ATTR_ADDR_EXPR:
1344 case ADDR_EXPR:
1345 switch (TREE_CODE (operand))
1346 {
1347 case INDIRECT_REF:
1348 case UNCONSTRAINED_ARRAY_REF:
1349 result = TREE_OPERAND (operand, 0);
1350
1351 /* Make sure the type here is a pointer, not a reference.
1352 GCC wants pointer types for function addresses. */
1353 if (!result_type)
1354 result_type = build_pointer_type (type);
1355
1356 /* If the underlying object can alias everything, propagate the
1357 property since we are effectively retrieving the object. */
1358 if (POINTER_TYPE_P (TREE_TYPE (result))
1359 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1360 {
1361 if (TREE_CODE (result_type) == POINTER_TYPE
1362 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1363 result_type
1364 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1365 TYPE_MODE (result_type),
1366 true);
1367 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1368 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1369 result_type
1370 = build_reference_type_for_mode (TREE_TYPE (result_type),
1371 TYPE_MODE (result_type),
1372 true);
1373 }
1374 break;
1375
1376 case NULL_EXPR:
1377 result = operand;
1378 TREE_TYPE (result) = type = build_pointer_type (type);
1379 break;
1380
1381 case COMPOUND_EXPR:
1382 /* Fold a compound expression if it has unconstrained array type
 1383              since the middle-end cannot handle it.  But we don't do it in the
1384 general case because it may introduce aliasing issues if the
1385 first operand is an indirect assignment and the second operand
1386 the corresponding address, e.g. for an allocator. */
1387 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
1388 {
1389 result = build_unary_op (ADDR_EXPR, result_type,
1390 TREE_OPERAND (operand, 1));
1391 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1392 TREE_OPERAND (operand, 0), result);
1393 break;
1394 }
1395 goto common;
1396
1397 case ARRAY_REF:
1398 case ARRAY_RANGE_REF:
1399 case COMPONENT_REF:
1400 case BIT_FIELD_REF:
1401 /* If this is for 'Address, find the address of the prefix and add
1402 the offset to the field. Otherwise, do this the normal way. */
1403 if (op_code == ATTR_ADDR_EXPR)
1404 {
1405 HOST_WIDE_INT bitsize;
1406 HOST_WIDE_INT bitpos;
1407 tree offset, inner;
1408 machine_mode mode;
1409 int unsignedp, reversep, volatilep;
1410
1411 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1412 &mode, &unsignedp, &reversep,
1413 &volatilep, false);
1414
1415 /* If INNER is a padding type whose field has a self-referential
1416 size, convert to that inner type. We know the offset is zero
1417 and we need to have that type visible. */
1418 if (type_is_padding_self_referential (TREE_TYPE (inner)))
1419 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1420 inner);
1421
1422 /* Compute the offset as a byte offset from INNER. */
1423 if (!offset)
1424 offset = size_zero_node;
1425
1426 offset = size_binop (PLUS_EXPR, offset,
1427 size_int (bitpos / BITS_PER_UNIT));
1428
1429 /* Take the address of INNER, convert it to a pointer to our type
1430 and add the offset. */
1431 inner = build_unary_op (ADDR_EXPR,
1432 build_pointer_type (TREE_TYPE (operand)),
1433 inner);
1434 result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
1435 inner, offset);
1436 break;
1437 }
1438 goto common;
1439
1440 case CONSTRUCTOR:
1441 /* If this is just a constructor for a padded record, we can
1442 just take the address of the single field and convert it to
1443 a pointer to our type. */
1444 if (TYPE_IS_PADDING_P (type))
1445 {
1446 result
1447 = build_unary_op (ADDR_EXPR,
1448 build_pointer_type (TREE_TYPE (operand)),
1449 CONSTRUCTOR_ELT (operand, 0)->value);
1450 break;
1451 }
1452 goto common;
1453
1454 case NOP_EXPR:
1455 if (AGGREGATE_TYPE_P (type)
1456 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1457 return build_unary_op (ADDR_EXPR, result_type,
1458 TREE_OPERAND (operand, 0));
1459
1460 /* ... fallthru ... */
1461
1462 case VIEW_CONVERT_EXPR:
 1463           /* If this is just a variant conversion or if the conversion doesn't
1464 change the mode, get the result type from this type and go down.
1465 This is needed for conversions of CONST_DECLs, to eventually get
1466 to the address of their CORRESPONDING_VARs. */
1467 if ((TYPE_MAIN_VARIANT (type)
1468 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1469 || (TYPE_MODE (type) != BLKmode
1470 && (TYPE_MODE (type)
1471 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1472 return build_unary_op (ADDR_EXPR,
1473 (result_type ? result_type
1474 : build_pointer_type (type)),
1475 TREE_OPERAND (operand, 0));
1476 goto common;
1477
1478 case CONST_DECL:
1479 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1480
1481 /* ... fall through ... */
1482
1483 default:
1484 common:
1485
1486 /* If we are taking the address of a padded record whose field
1487 contains a template, take the address of the field. */
1488 if (TYPE_IS_PADDING_P (type)
1489 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1490 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1491 {
1492 type = TREE_TYPE (TYPE_FIELDS (type));
1493 operand = convert (type, operand);
1494 }
1495
1496 gnat_mark_addressable (operand);
1497 result = build_fold_addr_expr (operand);
1498 }
1499
1500 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1501 break;
1502
1503 case INDIRECT_REF:
1504 {
1505 tree t = remove_conversions (operand, false);
1506 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1507
1508 /* If TYPE is a thin pointer, either first retrieve the base if this
1509 is an expression with an offset built for the initialization of an
1510 object with an unconstrained nominal subtype, or else convert to
1511 the fat pointer. */
1512 if (TYPE_IS_THIN_POINTER_P (type))
1513 {
1514 tree rec_type = TREE_TYPE (type);
1515
1516 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1517 && TREE_OPERAND (operand, 1)
1518 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1519 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1520 {
1521 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1522 type = TREE_TYPE (operand);
1523 }
1524 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1525 {
1526 operand
1527 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1528 operand);
1529 type = TREE_TYPE (operand);
1530 }
1531 }
1532
1533 /* If we want to refer to an unconstrained array, use the appropriate
1534 expression. But this will never survive down to the back-end. */
1535 if (TYPE_IS_FAT_POINTER_P (type))
1536 {
1537 result = build1 (UNCONSTRAINED_ARRAY_REF,
1538 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1539 TREE_READONLY (result)
1540 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1541 }
1542
1543 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1544 else if (TREE_CODE (operand) == ADDR_EXPR)
1545 result = TREE_OPERAND (operand, 0);
1546
1547 /* Otherwise, build and fold the indirect reference. */
1548 else
1549 {
1550 result = build_fold_indirect_ref (operand);
1551 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1552 }
1553
1554 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1555 {
1556 TREE_SIDE_EFFECTS (result) = 1;
1557 if (TREE_CODE (result) == INDIRECT_REF)
1558 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1559 }
1560
1561 if ((TREE_CODE (result) == INDIRECT_REF
1562 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1563 && can_never_be_null)
1564 TREE_THIS_NOTRAP (result) = 1;
1565
1566 break;
1567 }
1568
1569 case NEGATE_EXPR:
1570 case BIT_NOT_EXPR:
1571 {
1572 tree modulus = ((operation_type
1573 && TREE_CODE (operation_type) == INTEGER_TYPE
1574 && TYPE_MODULAR_P (operation_type))
1575 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1576 int mod_pow2 = modulus && integer_pow2p (modulus);
1577
1578 /* If this is a modular type, there are various possibilities
1579 depending on the operation and whether the modulus is a
1580 power of two or not. */
1581
1582 if (modulus)
1583 {
1584 gcc_assert (operation_type == base_type);
1585 operand = convert (operation_type, operand);
1586
1587 /* The fastest in the negate case for binary modulus is
1588 the straightforward code; the TRUNC_MOD_EXPR below
1589 is an AND operation. */
1590 if (op_code == NEGATE_EXPR && mod_pow2)
1591 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1592 fold_build1 (NEGATE_EXPR, operation_type,
1593 operand),
1594 modulus);
1595
1596              /* For the nonbinary negate case, return zero for a zero operand,
1597 else return the modulus minus the operand. If the modulus
1598 is a power of two minus one, we can do the subtraction
1599 as an XOR since it is equivalent and faster on most machines. */
1600 else if (op_code == NEGATE_EXPR && !mod_pow2)
1601 {
1602 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1603 modulus,
1604 convert (operation_type,
1605 integer_one_node))))
1606 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1607 operand, modulus);
1608 else
1609 result = fold_build2 (MINUS_EXPR, operation_type,
1610 modulus, operand);
1611
1612 result = fold_build3 (COND_EXPR, operation_type,
1613 fold_build2 (NE_EXPR,
1614 boolean_type_node,
1615 operand,
1616 convert
1617 (operation_type,
1618 integer_zero_node)),
1619 result, operand);
1620 }
1621 else
1622 {
1623                  /* For the NOT cases, we need a constant equal to
1624                     the modulus minus one.  For a binary modulus, we
1625                     XOR the operand against that constant; for a
1626                     nonbinary modulus, we subtract the operand from it.  */
1627
1628 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1629 convert (operation_type,
1630 integer_one_node));
1631
1632 if (mod_pow2)
1633 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1634 operand, cnst);
1635 else
1636 result = fold_build2 (MINUS_EXPR, operation_type,
1637 cnst, operand);
1638 }
1639
1640 break;
1641 }
1642 }
1643
1644 /* ... fall through ... */
1645
1646 default:
1647 gcc_assert (operation_type == base_type);
1648 result = fold_build1 (op_code, operation_type,
1649 convert (operation_type, operand));
1650 }
1651
1652 if (result_type && TREE_TYPE (result) != result_type)
1653 result = convert (result_type, result);
1654
1655 return result;
1656 }
1657 \f
1658 /* Similar, but for COND_EXPR. */
1659
1660 tree
1661 build_cond_expr (tree result_type, tree condition_operand,
1662 tree true_operand, tree false_operand)
1663 {
1664 bool addr_p = false;
1665 tree result;
1666
1667 /* The front-end verified that result, true and false operands have
1668      the same base type.  Convert everything to the result type.  */
1669 true_operand = convert (result_type, true_operand);
1670 false_operand = convert (result_type, false_operand);
1671
1672 /* If the result type is unconstrained, take the address of the operands and
1673 then dereference the result. Likewise if the result type is passed by
1674 reference, because creating a temporary of this type is not allowed. */
1675 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1676 || TYPE_IS_BY_REFERENCE_P (result_type)
1677 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1678 {
1679 result_type = build_pointer_type (result_type);
1680 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1681 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1682 addr_p = true;
1683 }
1684
1685 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1686 true_operand, false_operand);
1687
1688   /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1689 in both arms, make sure it gets evaluated by moving it ahead of the
1690 conditional expression. This is necessary because it is evaluated
1691 in only one place at run time and would otherwise be uninitialized
1692 in one of the arms. */
1693 true_operand = skip_simple_arithmetic (true_operand);
1694 false_operand = skip_simple_arithmetic (false_operand);
1695
1696 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1697 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1698
1699 if (addr_p)
1700 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1701
1702 return result;
1703 }
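
/* A minimal usage sketch, assuming a gigi context where GNU_A and GNU_B are
   expressions of the scalar type GNU_TYPE; all names below are hypothetical.  */
#if 0
static tree
example_build_max (tree gnu_type, tree gnu_a, tree gnu_b)
{
  /* Max (A, B) as "if A >= B then A else B"; protect the operands first
     since each of them is used twice.  */
  gnu_a = gnat_protect_expr (gnu_a);
  gnu_b = gnat_protect_expr (gnu_b);
  tree gnu_cond = build_binary_op (GE_EXPR, boolean_type_node, gnu_a, gnu_b);
  return build_cond_expr (gnu_type, gnu_cond, gnu_a, gnu_b);
}
#endif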
1704
1705 /* Similar, but for COMPOUND_EXPR. */
1706
1707 tree
1708 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1709 {
1710 bool addr_p = false;
1711 tree result;
1712
1713 /* If the result type is unconstrained, take the address of the operand and
1714 then dereference the result. Likewise if the result type is passed by
1715 reference, but this is natively handled in the gimplifier. */
1716 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1717 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1718 {
1719 result_type = build_pointer_type (result_type);
1720 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1721 addr_p = true;
1722 }
1723
1724 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1725 expr_operand);
1726
1727 if (addr_p)
1728 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1729
1730 return result;
1731 }
1732 \f
1733 /* Conveniently construct a function call expression. FNDECL names the
1734 function to be called, N is the number of arguments, and the "..."
1735    parameters are the argument expressions.  Unlike build_call_expr,
1736    this doesn't fold the call, so it always returns a CALL_EXPR.  */
1737
1738 tree
1739 build_call_n_expr (tree fndecl, int n, ...)
1740 {
1741 va_list ap;
1742 tree fntype = TREE_TYPE (fndecl);
1743 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1744
1745 va_start (ap, n);
1746 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1747 va_end (ap);
1748 return fn;
1749 }
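
/* A minimal usage sketch, assuming a gigi context; malloc_decl is the
   FUNCTION_DECL for the default __gnat_malloc allocator used elsewhere in
   this file, and the variable names are hypothetical.  */
#if 0
  tree gnu_size = size_int (64);
  tree gnu_alloc = build_call_n_expr (malloc_decl, 1, gnu_size);
#endif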
1750 \f
1751 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1752 pointed to by FILENAME, LINE and COL. Fall back to the current location
1753 if GNAT_NODE is absent or has no SLOC. */
1754
1755 static void
1756 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1757 {
1758 const char *str;
1759 int line_number, column_number;
1760
1761 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1762 {
1763 str = "";
1764 line_number = 0;
1765 column_number = 0;
1766 }
1767 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1768 {
1769 str = Get_Name_String
1770 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1771 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1772 column_number = Get_Column_Number (Sloc (gnat_node));
1773 }
1774 else
1775 {
1776 str = lbasename (LOCATION_FILE (input_location));
1777 line_number = LOCATION_LINE (input_location);
1778 column_number = LOCATION_COLUMN (input_location);
1779 }
1780
1781 const int len = strlen (str);
1782 *filename = build_string (len, str);
1783 TREE_TYPE (*filename) = build_array_type (unsigned_char_type_node,
1784 build_index_type (size_int (len)));
1785 *line = build_int_cst (NULL_TREE, line_number);
1786 if (col)
1787 *col = build_int_cst (NULL_TREE, column_number);
1788 }
1789
1790 /* Build a call to a function that raises an exception and passes file name
1791 and line number, if requested. MSG says which exception function to call.
1792 GNAT_NODE is the node conveying the source location for which the error
1793 should be signaled, or Empty in which case the error is signaled for the
1794 current location. KIND says which kind of exception node this is for,
1795 among N_Raise_{Constraint,Storage,Program}_Error. */
1796
1797 tree
1798 build_call_raise (int msg, Node_Id gnat_node, char kind)
1799 {
1800 tree fndecl = gnat_raise_decls[msg];
1801 tree label = get_exception_label (kind);
1802 tree filename, line;
1803
1804 /* If this is to be done as a goto, handle that case. */
1805 if (label)
1806 {
1807 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1808 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1809
1810 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1811 if (Present (local_raise))
1812 {
1813 tree gnu_local_raise
1814 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1815 tree gnu_exception_entity
1816 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1817 tree gnu_call
1818 = build_call_n_expr (gnu_local_raise, 1,
1819 build_unary_op (ADDR_EXPR, NULL_TREE,
1820 gnu_exception_entity));
1821 gnu_result
1822 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1823 }
1824
1825 return gnu_result;
1826 }
1827
1828 expand_sloc (gnat_node, &filename, &line, NULL);
1829
1830 return
1831 build_call_n_expr (fndecl, 2,
1832 build1 (ADDR_EXPR,
1833 build_pointer_type (unsigned_char_type_node),
1834 filename),
1835 line);
1836 }
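
/* A minimal usage sketch, assuming GNAT_NODE is a hypothetical Node_Id
   carrying the source location of a failed range check; the reason code and
   node kind come from the GNAT front-end as usual.  */
#if 0
  tree gnu_raise
    = build_call_raise (CE_Range_Check_Failed, gnat_node,
                        N_Raise_Constraint_Error);
#endif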
1837
1838 /* Similar to build_call_raise, with extra information about the column
1839 where the check failed. */
1840
1841 tree
1842 build_call_raise_column (int msg, Node_Id gnat_node)
1843 {
1844 tree fndecl = gnat_raise_decls_ext[msg];
1845 tree filename, line, col;
1846
1847 expand_sloc (gnat_node, &filename, &line, &col);
1848
1849 return
1850 build_call_n_expr (fndecl, 3,
1851 build1 (ADDR_EXPR,
1852 build_pointer_type (unsigned_char_type_node),
1853 filename),
1854 line, col);
1855 }
1856
1857 /* Similar to build_call_raise_column, for an index or range check exception,
1858 with extra information of the form "INDEX out of range FIRST..LAST". */
1859
1860 tree
1861 build_call_raise_range (int msg, Node_Id gnat_node,
1862 tree index, tree first, tree last)
1863 {
1864 tree fndecl = gnat_raise_decls_ext[msg];
1865 tree filename, line, col;
1866
1867 expand_sloc (gnat_node, &filename, &line, &col);
1868
1869 return
1870 build_call_n_expr (fndecl, 6,
1871 build1 (ADDR_EXPR,
1872 build_pointer_type (unsigned_char_type_node),
1873 filename),
1874 line, col,
1875 convert (integer_type_node, index),
1876 convert (integer_type_node, first),
1877 convert (integer_type_node, last));
1878 }
1879 \f
1880 /* qsort comparator for the bit positions of two constructor elements
1881 for record components. */
1882
1883 static int
1884 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1885 {
1886 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1887 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1888 const_tree const field1 = elmt1->index;
1889 const_tree const field2 = elmt2->index;
1890 const int ret
1891 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1892
1893 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1894 }
1895
1896 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1897
1898 tree
1899 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1900 {
1901 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1902 bool read_only = true;
1903 bool side_effects = false;
1904 tree result, obj, val;
1905 unsigned int n_elmts;
1906
1907 /* Scan the elements to see if they are all constant or if any has side
1908 effects, to let us set global flags on the resulting constructor. Count
1909 the elements along the way for possible sorting purposes below. */
1910 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1911 {
1912 /* The predicate must be in keeping with output_constructor. */
1913 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1914 || (TREE_CODE (type) == RECORD_TYPE
1915 && CONSTRUCTOR_BITFIELD_P (obj)
1916 && !initializer_constant_valid_for_bitfield_p (val))
1917 || !initializer_constant_valid_p (val,
1918 TREE_TYPE (val),
1919 TYPE_REVERSE_STORAGE_ORDER (type)))
1920 allconstant = false;
1921
1922 if (!TREE_READONLY (val))
1923 read_only = false;
1924
1925 if (TREE_SIDE_EFFECTS (val))
1926 side_effects = true;
1927 }
1928
1929 /* For record types with constant components only, sort field list
1930 by increasing bit position. This is necessary to ensure the
1931 constructor can be output as static data. */
1932 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1933 v->qsort (compare_elmt_bitpos);
1934
1935 result = build_constructor (type, v);
1936 CONSTRUCTOR_NO_CLEARING (result) = 1;
1937 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1938 TREE_SIDE_EFFECTS (result) = side_effects;
1939 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1940 return result;
1941 }
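
/* A minimal usage sketch: initialize the first two fields of the record type
   GNU_REC_TYPE with GNU_VAL1 and GNU_VAL2, mirroring the way build_allocator
   below fills in a template and its data; all names are hypothetical.  */
#if 0
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_rec_type), gnu_val1);
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_rec_type)),
                          gnu_val2);
  tree gnu_aggr = gnat_build_constructor (gnu_rec_type, v);
#endif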
1942 \f
1943 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
1944 is not found in the record. Don't fold the result if NO_FOLD is true. */
1945
1946 static tree
1947 build_simple_component_ref (tree record, tree field, bool no_fold)
1948 {
1949 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
1950 tree ref;
1951
1952 gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
1953
1954 /* Try to fold a conversion from another record or union type unless the type
1955 contains a placeholder as it might be needed for a later substitution. */
1956 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
1957 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
1958 && !type_contains_placeholder_p (type))
1959 {
1960 tree op = TREE_OPERAND (record, 0);
1961
1962 /* If this is an unpadding operation, convert the underlying object to
1963 the unpadded type directly. */
1964 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
1965 return convert (TREE_TYPE (field), op);
1966
1967 /* Otherwise try to access FIELD directly in the underlying type, but
1968 make sure that the form of the reference doesn't change too much;
1969 this can happen for an unconstrained bit-packed array type whose
1970 constrained form can be an integer type. */
1971 ref = build_simple_component_ref (op, field, no_fold);
1972 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
1973 return ref;
1974 }
1975
1976 /* If this field is not in the specified record, see if we can find a field
1977 in the specified record whose original field is the same as this one. */
1978 if (DECL_CONTEXT (field) != type)
1979 {
1980 tree new_field;
1981
1982 /* First loop through normal components. */
1983 for (new_field = TYPE_FIELDS (type);
1984 new_field;
1985 new_field = DECL_CHAIN (new_field))
1986 if (SAME_FIELD_P (field, new_field))
1987 break;
1988
1989 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
1990 component in the first search. Doing this search in two steps is
1991 required to avoid hidden homonymous fields in the _Parent field. */
1992 if (!new_field)
1993 for (new_field = TYPE_FIELDS (type);
1994 new_field;
1995 new_field = DECL_CHAIN (new_field))
1996 if (DECL_INTERNAL_P (new_field)
1997 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
1998 {
1999 tree field_ref
2000 = build_simple_component_ref (record, new_field, no_fold);
2001 ref = build_simple_component_ref (field_ref, field, no_fold);
2002 if (ref)
2003 return ref;
2004 }
2005
2006 field = new_field;
2007 }
2008
2009 if (!field)
2010 return NULL_TREE;
2011
2012 /* If the field's offset has overflowed, do not try to access it, as doing
2013 so may trigger sanity checks deeper in the back-end. Note that we don't
2014 need to warn since this will be done on trying to declare the object. */
2015 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2016 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2017 return NULL_TREE;
2018
2019 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2020
2021 if (TREE_READONLY (record)
2022 || TREE_READONLY (field)
2023 || TYPE_READONLY (type))
2024 TREE_READONLY (ref) = 1;
2025
2026 if (TREE_THIS_VOLATILE (record)
2027 || TREE_THIS_VOLATILE (field)
2028 || TYPE_VOLATILE (type))
2029 TREE_THIS_VOLATILE (ref) = 1;
2030
2031 if (no_fold)
2032 return ref;
2033
2034 /* The generic folder may punt in this case because the inner array type
2035 can be self-referential, but folding is in fact not problematic. */
2036 if (TREE_CODE (record) == CONSTRUCTOR
2037 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2038 {
2039 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2040 unsigned HOST_WIDE_INT idx;
2041 tree index, value;
2042 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2043 if (index == field)
2044 return value;
2045 return ref;
2046 }
2047
2048 return fold (ref);
2049 }
2050
2051 /* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
2052 field is not found in the record. */
2053
2054 tree
2055 build_component_ref (tree record, tree field, bool no_fold)
2056 {
2057 tree ref = build_simple_component_ref (record, field, no_fold);
2058 if (ref)
2059 return ref;
2060
2061 /* Assume this is an invalid user field so raise Constraint_Error. */
2062 return build1 (NULL_EXPR, TREE_TYPE (field),
2063 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2064 N_Raise_Constraint_Error));
2065 }
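
/* A minimal usage sketch, assuming GNU_REC is an expression of a record type
   and GNU_FIELD one of its FIELD_DECLs (hypothetical names); passing false
   for NO_FOLD lets the reference be folded when possible.  */
#if 0
  tree gnu_comp = build_component_ref (gnu_rec, gnu_field, false);
#endif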
2066 \f
2067 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2068 identically. Process the case where a GNAT_PROC to call is provided. */
2069
2070 static inline tree
2071 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2072 Entity_Id gnat_proc, Entity_Id gnat_pool)
2073 {
2074 tree gnu_proc = gnat_to_gnu (gnat_proc);
2075 tree gnu_call;
2076
2077 /* A storage pool's underlying type is a record type (for both predefined
2078 storage pools and GNAT simple storage pools). The secondary stack uses
2079 the same mechanism, but its pool object (SS_Pool) is an integer. */
2080 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2081 {
2082       /* The size is the third parameter; the alignment parameter
2083          has the same type as the size.  */
2084 Entity_Id gnat_size_type
2085 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2086 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2087
2088 tree gnu_pool = gnat_to_gnu (gnat_pool);
2089 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2090 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2091
2092 gnu_size = convert (gnu_size_type, gnu_size);
2093 gnu_align = convert (gnu_size_type, gnu_align);
2094
2095 /* The first arg is always the address of the storage pool; next
2096 comes the address of the object, for a deallocator, then the
2097 size and alignment. */
2098 if (gnu_obj)
2099 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2100 gnu_size, gnu_align);
2101 else
2102 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2103 gnu_size, gnu_align);
2104 }
2105
2106 /* Secondary stack case. */
2107 else
2108 {
2109 /* The size is the second parameter. */
2110 Entity_Id gnat_size_type
2111 = Etype (Next_Formal (First_Formal (gnat_proc)));
2112 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2113
2114 gnu_size = convert (gnu_size_type, gnu_size);
2115
2116 /* The first arg is the address of the object, for a deallocator,
2117 then the size. */
2118 if (gnu_obj)
2119 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2120 else
2121 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2122 }
2123
2124 return gnu_call;
2125 }
2126
2127 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2128 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2129 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2130 latter offers. */
2131
2132 static inline tree
2133 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2134 {
2135 /* When the DATA_TYPE alignment is stricter than what malloc offers
2136 (super-aligned case), we allocate an "aligning" wrapper type and return
2137 the address of its single data field with the malloc's return value
2138 stored just in front. */
2139
2140 unsigned int data_align = TYPE_ALIGN (data_type);
2141 unsigned int system_allocator_alignment
2142 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2143
2144 tree aligning_type
2145 = ((data_align > system_allocator_alignment)
2146 ? make_aligning_type (data_type, data_align, data_size,
2147 system_allocator_alignment,
2148 POINTER_SIZE / BITS_PER_UNIT,
2149 gnat_node)
2150 : NULL_TREE);
2151
2152 tree size_to_malloc
2153 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2154
2155 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2156
2157 if (aligning_type)
2158 {
2159 /* Latch malloc's return value and get a pointer to the aligning field
2160 first. */
2161 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2162
2163 tree aligning_record_addr
2164 = convert (build_pointer_type (aligning_type), storage_ptr);
2165
2166 tree aligning_record
2167 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2168
2169 tree aligning_field
2170 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2171 false);
2172
2173 tree aligning_field_addr
2174 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2175
2176 /* Then arrange to store the allocator's return value ahead
2177 and return. */
2178 tree storage_ptr_slot_addr
2179 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2180 convert (ptr_type_node, aligning_field_addr),
2181 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2182 / BITS_PER_UNIT));
2183
2184 tree storage_ptr_slot
2185 = build_unary_op (INDIRECT_REF, NULL_TREE,
2186 convert (build_pointer_type (ptr_type_node),
2187 storage_ptr_slot_addr));
2188
2189 return
2190 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2191 build_binary_op (INIT_EXPR, NULL_TREE,
2192 storage_ptr_slot, storage_ptr),
2193 aligning_field_addr);
2194 }
2195 else
2196 return malloc_ptr;
2197 }
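
/* A hypothetical concrete example: with a system allocator alignment of
   16 bytes, a DATA_TYPE requiring 64-byte alignment and malloc returning
   0x1010, the single field of the aligning record falls on the next 64-byte
   boundary that leaves room for a pointer, i.e. 0x1040.  0x1040 is what the
   caller gets back, and the raw value 0x1010 is stored at
   0x1040 - sizeof (void *) so that maybe_wrap_free below can recover it.  */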
2198
2199 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2200 designated by DATA_PTR using the __gnat_free entry point. */
2201
2202 static inline tree
2203 maybe_wrap_free (tree data_ptr, tree data_type)
2204 {
2205 /* In the regular alignment case, we pass the data pointer straight to free.
2206 In the superaligned case, we need to retrieve the initial allocator
2207 return value, stored in front of the data block at allocation time. */
2208
2209 unsigned int data_align = TYPE_ALIGN (data_type);
2210 unsigned int system_allocator_alignment
2211 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2212
2213 tree free_ptr;
2214
2215 if (data_align > system_allocator_alignment)
2216 {
2217 /* DATA_FRONT_PTR (void *)
2218 = (void *)DATA_PTR - (void *)sizeof (void *)) */
2219 tree data_front_ptr
2220 = build_binary_op
2221 (POINTER_PLUS_EXPR, ptr_type_node,
2222 convert (ptr_type_node, data_ptr),
2223 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2224
2225 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2226 free_ptr
2227 = build_unary_op
2228 (INDIRECT_REF, NULL_TREE,
2229 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2230 }
2231 else
2232 free_ptr = data_ptr;
2233
2234 return build_call_n_expr (free_decl, 1, free_ptr);
2235 }
2236
2237 /* Build a GCC tree to call an allocation or deallocation function.
2238 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2239 generate an allocator.
2240
2241 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2242 object type, used to determine the to-be-honored address alignment.
2243 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2244 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2245 to provide an error location for restriction violation messages. */
2246
2247 tree
2248 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2249 Entity_Id gnat_proc, Entity_Id gnat_pool,
2250 Node_Id gnat_node)
2251 {
2252 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2253
2254   /* Explicit proc to call?  This one is assumed to deal with the type
2255 alignment constraints. */
2256 if (Present (gnat_proc))
2257 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2258 gnat_proc, gnat_pool);
2259
2260 /* Otherwise, object to "free" or "malloc" with possible special processing
2261 for alignments stricter than what the default allocator honors. */
2262 else if (gnu_obj)
2263 return maybe_wrap_free (gnu_obj, gnu_type);
2264 else
2265 {
2266       /* Assert that we can no longer be called with this special pool.  */
2267 gcc_assert (gnat_pool != -1);
2268
2269 /* Check that we aren't violating the associated restriction. */
2270 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2271 {
2272 Check_No_Implicit_Heap_Alloc (gnat_node);
2273 if (Has_Task (Etype (gnat_node)))
2274 Check_No_Implicit_Task_Alloc (gnat_node);
2275 if (Has_Protected (Etype (gnat_node)))
2276 Check_No_Implicit_Protected_Alloc (gnat_node);
2277 }
2278 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2279 }
2280 }
2281 \f
2282 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2283 initial value is INIT, if INIT is nonzero. Convert the expression to
2284 RESULT_TYPE, which must be some pointer type, and return the result.
2285
2286 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2287 the storage pool to use. GNAT_NODE is used to provide an error
2288 location for restriction violation messages. If IGNORE_INIT_TYPE is
2289 true, ignore the type of INIT for the purpose of determining the size;
2290 this will cause the maximum size to be allocated if TYPE is of
2291 self-referential size. */
2292
2293 tree
2294 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2295 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2296 {
2297 tree size, storage, storage_deref, storage_init;
2298
2299 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2300 if (init && TREE_CODE (init) == NULL_EXPR)
2301 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2302
2303 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2304 else if (init && TREE_CODE (init) == COND_EXPR)
2305 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2306 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2307 gnat_proc, gnat_pool, gnat_node,
2308 ignore_init_type),
2309 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2310 gnat_proc, gnat_pool, gnat_node,
2311 ignore_init_type));
2312
2313 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2314 sizes of the object and its template. Allocate the whole thing and
2315 fill in the parts that are known. */
2316 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2317 {
2318 tree storage_type
2319 = build_unc_object_type_from_ptr (result_type, type,
2320 get_identifier ("ALLOC"), false);
2321 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2322 tree storage_ptr_type = build_pointer_type (storage_type);
2323
2324 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2325 init);
2326
2327 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2328 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2329 size = size_int (-1);
2330
2331 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2332 gnat_proc, gnat_pool, gnat_node);
2333 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2334 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2335 TREE_THIS_NOTRAP (storage_deref) = 1;
2336
2337 /* If there is an initializing expression, then make a constructor for
2338 the entire object including the bounds and copy it into the object.
2339 If there is no initializing expression, just set the bounds. */
2340 if (init)
2341 {
2342 vec<constructor_elt, va_gc> *v;
2343 vec_alloc (v, 2);
2344
2345 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2346 build_template (template_type, type, init));
2347 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2348 init);
2349 storage_init
2350 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2351 gnat_build_constructor (storage_type, v));
2352 }
2353 else
2354 storage_init
2355 = build_binary_op (INIT_EXPR, NULL_TREE,
2356 build_component_ref (storage_deref,
2357 TYPE_FIELDS (storage_type),
2358 false),
2359 build_template (template_type, type, NULL_TREE));
2360
2361 return build2 (COMPOUND_EXPR, result_type,
2362 storage_init, convert (result_type, storage));
2363 }
2364
2365 size = TYPE_SIZE_UNIT (type);
2366
2367 /* If we have an initializing expression, see if its size is simpler
2368 than the size from the type. */
2369 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2370 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2371 || CONTAINS_PLACEHOLDER_P (size)))
2372 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2373
2374 /* If the size is still self-referential, reference the initializing
2375 expression, if it is present. If not, this must have been a
2376 call to allocate a library-level object, in which case we use
2377 the maximum size. */
2378 if (CONTAINS_PLACEHOLDER_P (size))
2379 {
2380 if (!ignore_init_type && init)
2381 size = substitute_placeholder_in_expr (size, init);
2382 else
2383 size = max_size (size, true);
2384 }
2385
2386 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2387 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2388 size = size_int (-1);
2389
2390 storage = convert (result_type,
2391 build_call_alloc_dealloc (NULL_TREE, size, type,
2392 gnat_proc, gnat_pool,
2393 gnat_node));
2394
2395 /* If we have an initial value, protect the new address, assign the value
2396 and return the address with a COMPOUND_EXPR. */
2397 if (init)
2398 {
2399 storage = gnat_protect_expr (storage);
2400 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2401 TREE_THIS_NOTRAP (storage_deref) = 1;
2402 storage_init
2403 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2404 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2405 }
2406
2407 return storage;
2408 }
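
/* A minimal usage sketch for the simple (constrained) case, assuming a gigi
   context translating an allocator of an integer type with the default pool;
   GNU_TYPE, GNAT_NODE and the other names are hypothetical, and Empty stands
   for "no procedure/pool".  */
#if 0
  tree gnu_ptr_type = build_pointer_type (gnu_type);
  tree gnu_init = convert (gnu_type, integer_zero_node);
  tree gnu_new
    = build_allocator (gnu_type, gnu_init, gnu_ptr_type,
                       Empty, Empty, gnat_node, false);
#endif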
2409 \f
2410 /* Indicate that we need to take the address of T and that it therefore
2411 should not be allocated in a register. Returns true if successful. */
2412
2413 bool
2414 gnat_mark_addressable (tree t)
2415 {
2416 while (true)
2417 switch (TREE_CODE (t))
2418 {
2419 case ADDR_EXPR:
2420 case COMPONENT_REF:
2421 case ARRAY_REF:
2422 case ARRAY_RANGE_REF:
2423 case REALPART_EXPR:
2424 case IMAGPART_EXPR:
2425 case VIEW_CONVERT_EXPR:
2426 case NON_LVALUE_EXPR:
2427 CASE_CONVERT:
2428 t = TREE_OPERAND (t, 0);
2429 break;
2430
2431 case COMPOUND_EXPR:
2432 t = TREE_OPERAND (t, 1);
2433 break;
2434
2435 case CONSTRUCTOR:
2436 TREE_ADDRESSABLE (t) = 1;
2437 return true;
2438
2439 case VAR_DECL:
2440 case PARM_DECL:
2441 case RESULT_DECL:
2442 TREE_ADDRESSABLE (t) = 1;
2443 return true;
2444
2445 case FUNCTION_DECL:
2446 TREE_ADDRESSABLE (t) = 1;
2447 return true;
2448
2449 case CONST_DECL:
2450 return DECL_CONST_CORRESPONDING_VAR (t)
2451 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2452
2453 default:
2454 return true;
2455 }
2456 }
2457 \f
2458 /* Return true if EXP is a stable expression for the purpose of the functions
2459 below and, therefore, can be returned unmodified by them. We accept things
2460 that are actual constants or that have already been handled. */
2461
2462 static bool
2463 gnat_stable_expr_p (tree exp)
2464 {
2465 enum tree_code code = TREE_CODE (exp);
2466 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2467 }
2468
2469 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2470 but we know how to handle our own nodes. */
2471
2472 tree
2473 gnat_save_expr (tree exp)
2474 {
2475 tree type = TREE_TYPE (exp);
2476 enum tree_code code = TREE_CODE (exp);
2477
2478 if (gnat_stable_expr_p (exp))
2479 return exp;
2480
2481 if (code == UNCONSTRAINED_ARRAY_REF)
2482 {
2483 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2484 TREE_READONLY (t) = TYPE_READONLY (type);
2485 return t;
2486 }
2487
2488 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2489 This may be more efficient, but will also allow us to more easily find
2490 the match for the PLACEHOLDER_EXPR. */
2491 if (code == COMPONENT_REF
2492 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2493 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2494 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2495
2496 return save_expr (exp);
2497 }
2498
2499 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2500 is optimized under the assumption that EXP's value doesn't change before
2501 its subsequent reuse(s) except through its potential reevaluation. */
2502
2503 tree
2504 gnat_protect_expr (tree exp)
2505 {
2506 tree type = TREE_TYPE (exp);
2507 enum tree_code code = TREE_CODE (exp);
2508
2509 if (gnat_stable_expr_p (exp))
2510 return exp;
2511
2512 /* If EXP has no side effects, we theoretically don't need to do anything.
2513 However, we may be recursively passed more and more complex expressions
2514 involving checks which will be reused multiple times and eventually be
2515 unshared for gimplification; in order to avoid a complexity explosion
2516 at that point, we protect any expressions more complex than a simple
2517 arithmetic expression. */
2518 if (!TREE_SIDE_EFFECTS (exp))
2519 {
2520 tree inner = skip_simple_arithmetic (exp);
2521 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2522 return exp;
2523 }
2524
2525 /* If this is a conversion, protect what's inside the conversion. */
2526 if (code == NON_LVALUE_EXPR
2527 || CONVERT_EXPR_CODE_P (code)
2528 || code == VIEW_CONVERT_EXPR)
2529 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2530
2531 /* If we're indirectly referencing something, we only need to protect the
2532 address since the data itself can't change in these situations. */
2533 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2534 {
2535 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2536 TREE_READONLY (t) = TYPE_READONLY (type);
2537 return t;
2538 }
2539
2540 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2541 This may be more efficient, but will also allow us to more easily find
2542 the match for the PLACEHOLDER_EXPR. */
2543 if (code == COMPONENT_REF
2544 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2545 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2546 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2547
2548 /* If this is a fat pointer or something that can be placed in a register,
2549 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2550 returned via invisible reference in most ABIs so the temporary will
2551 directly be filled by the callee. */
2552 if (TYPE_IS_FAT_POINTER_P (type)
2553 || TYPE_MODE (type) != BLKmode
2554 || code == CALL_EXPR)
2555 return save_expr (exp);
2556
2557   /* Otherwise, take the address, protect it and dereference the result.  */
2558 return
2559 build_unary_op (INDIRECT_REF, type,
2560 save_expr (build_unary_op (ADDR_EXPR,
2561 build_reference_type (type),
2562 exp)));
2563 }
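
/* A minimal usage sketch, mirroring what build_allocator above does with the
   value returned by the allocation call: protect it, initialize the allocated
   object through it and hand it back, evaluating the call only once
   (GNU_ALLOC and GNU_INIT are hypothetical trees).  */
#if 0
  tree gnu_ptr = gnat_protect_expr (gnu_alloc);
  tree gnu_deref = build_unary_op (INDIRECT_REF, NULL_TREE, gnu_ptr);
  tree gnu_assign
    = build_binary_op (INIT_EXPR, NULL_TREE, gnu_deref, gnu_init);
  tree gnu_result
    = build2 (COMPOUND_EXPR, TREE_TYPE (gnu_ptr), gnu_assign, gnu_ptr);
#endif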
2564
2565 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2566 argument to force evaluation of everything. */
2567
2568 static tree
2569 gnat_stabilize_reference_1 (tree e, void *data)
2570 {
2571 const bool force = *(bool *)data;
2572 enum tree_code code = TREE_CODE (e);
2573 tree type = TREE_TYPE (e);
2574 tree result;
2575
2576 if (gnat_stable_expr_p (e))
2577 return e;
2578
2579 switch (TREE_CODE_CLASS (code))
2580 {
2581 case tcc_exceptional:
2582 case tcc_declaration:
2583 case tcc_comparison:
2584 case tcc_expression:
2585 case tcc_reference:
2586 case tcc_vl_exp:
2587 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2588 fat pointer. This may be more efficient, but will also allow
2589 us to more easily find the match for the PLACEHOLDER_EXPR. */
2590 if (code == COMPONENT_REF
2591 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2592 result
2593 = build3 (code, type,
2594 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2595 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2596 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2597 so that it will only be evaluated once. */
2598 /* The tcc_reference and tcc_comparison classes could be handled as
2599 below, but it is generally faster to only evaluate them once. */
2600 else if (TREE_SIDE_EFFECTS (e) || force)
2601 return save_expr (e);
2602 else
2603 return e;
2604 break;
2605
2606 case tcc_binary:
2607 /* Recursively stabilize each operand. */
2608 result
2609 = build2 (code, type,
2610 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2611 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2612 break;
2613
2614 case tcc_unary:
2615 /* Recursively stabilize each operand. */
2616 result
2617 = build1 (code, type,
2618 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2619 break;
2620
2621 default:
2622 gcc_unreachable ();
2623 }
2624
2625 TREE_READONLY (result) = TREE_READONLY (e);
2626 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2627 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2628
2629 return result;
2630 }
2631
2632 /* This is equivalent to stabilize_reference in tree.c but we know how to
2633 handle our own nodes and we take extra arguments. FORCE says whether to
2634 force evaluation of everything in REF. INIT is set to the first arm of
2635 a COMPOUND_EXPR present in REF, if any. */
2636
2637 tree
2638 gnat_stabilize_reference (tree ref, bool force, tree *init)
2639 {
2640 return
2641 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2642 }
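
/* A minimal usage sketch: stabilize an lvalue that is about to be used
   several times, and reattach the statement part of a possible COMPOUND_EXPR
   in front of it (GNU_LHS is a hypothetical tree).  */
#if 0
  tree gnu_init = NULL_TREE;
  gnu_lhs = gnat_stabilize_reference (gnu_lhs, false, &gnu_init);
  if (gnu_init)
    gnu_lhs = build_compound_expr (TREE_TYPE (gnu_lhs), gnu_init, gnu_lhs);
#endif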
2643
2644 /* Rewrite reference REF and call FUNC on each expression within REF in the
2645 process. DATA is passed unmodified to FUNC. INIT is set to the first
2646 arm of a COMPOUND_EXPR present in REF, if any. */
2647
2648 tree
2649 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2650 {
2651 tree type = TREE_TYPE (ref);
2652 enum tree_code code = TREE_CODE (ref);
2653 tree result;
2654
2655 switch (code)
2656 {
2657 case CONST_DECL:
2658 case VAR_DECL:
2659 case PARM_DECL:
2660 case RESULT_DECL:
2661 /* No action is needed in this case. */
2662 return ref;
2663
2664 CASE_CONVERT:
2665 case FLOAT_EXPR:
2666 case FIX_TRUNC_EXPR:
2667 case VIEW_CONVERT_EXPR:
2668 result
2669 = build1 (code, type,
2670 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2671 init));
2672 break;
2673
2674 case INDIRECT_REF:
2675 case UNCONSTRAINED_ARRAY_REF:
2676 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2677 break;
2678
2679 case COMPONENT_REF:
2680 result = build3 (COMPONENT_REF, type,
2681 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2682 data, init),
2683 TREE_OPERAND (ref, 1), NULL_TREE);
2684 break;
2685
2686 case BIT_FIELD_REF:
2687 result = build3 (BIT_FIELD_REF, type,
2688 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2689 data, init),
2690 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2691 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
2692 break;
2693
2694 case ARRAY_REF:
2695 case ARRAY_RANGE_REF:
2696 result
2697 = build4 (code, type,
2698 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2699 init),
2700 func (TREE_OPERAND (ref, 1), data),
2701 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
2702 break;
2703
2704 case COMPOUND_EXPR:
2705 gcc_assert (*init == NULL_TREE);
2706 *init = TREE_OPERAND (ref, 0);
2707 /* We expect only the pattern built in Call_to_gnu. */
2708 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2709 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2710 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2711 return TREE_OPERAND (ref, 1);
2712
2713 case CALL_EXPR:
2714 {
2715 /* This can only be an atomic load. */
2716 gcc_assert (call_is_atomic_load (ref));
2717
2718 /* An atomic load is an INDIRECT_REF of its first argument. */
2719 tree t = CALL_EXPR_ARG (ref, 0);
2720 if (TREE_CODE (t) == NOP_EXPR)
2721 t = TREE_OPERAND (t, 0);
2722 if (TREE_CODE (t) == ADDR_EXPR)
2723 t = build1 (ADDR_EXPR, TREE_TYPE (t),
2724 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2725 init));
2726 else
2727 t = func (t, data);
2728 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2729
2730 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2731 t, CALL_EXPR_ARG (ref, 1));
2732 }
2733 break;
2734
2735 case ERROR_MARK:
2736 return error_mark_node;
2737
2738 default:
2739 gcc_unreachable ();
2740 }
2741
2742 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2743 may not be sustained across some paths, such as the way via build1 for
2744 INDIRECT_REF. We reset those flags here in the general case, which is
2745 consistent with the GCC version of this routine.
2746
2747 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2748 paths introduce side-effects where there was none initially (e.g. if a
2749 SAVE_EXPR is built) and we also want to keep track of that. */
2750 TREE_READONLY (result) = TREE_READONLY (ref);
2751 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2752 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2753
2754 if (code == INDIRECT_REF
2755 || code == UNCONSTRAINED_ARRAY_REF
2756 || code == ARRAY_REF
2757 || code == ARRAY_RANGE_REF)
2758 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2759
2760 return result;
2761 }
2762
2763 /* This is equivalent to get_inner_reference in expr.c but it returns the
2764 ultimate containing object only if the reference (lvalue) is constant,
2765 i.e. if it doesn't depend on the context in which it is evaluated. */
2766
2767 tree
2768 get_inner_constant_reference (tree exp)
2769 {
2770 while (true)
2771 {
2772 switch (TREE_CODE (exp))
2773 {
2774 case BIT_FIELD_REF:
2775 break;
2776
2777 case COMPONENT_REF:
2778 if (TREE_OPERAND (exp, 2) != NULL_TREE)
2779 return NULL_TREE;
2780
2781 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2782 return NULL_TREE;
2783 break;
2784
2785 case ARRAY_REF:
2786 case ARRAY_RANGE_REF:
2787 {
2788 if (TREE_OPERAND (exp, 2) != NULL_TREE
2789 || TREE_OPERAND (exp, 3) != NULL_TREE)
2790 return NULL_TREE;
2791
2792 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2793 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2794 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2795 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2796 return NULL_TREE;
2797 }
2798 break;
2799
2800 case REALPART_EXPR:
2801 case IMAGPART_EXPR:
2802 case VIEW_CONVERT_EXPR:
2803 break;
2804
2805 default:
2806 goto done;
2807 }
2808
2809 exp = TREE_OPERAND (exp, 0);
2810 }
2811
2812 done:
2813 return exp;
2814 }
2815
2816 /* If EXPR is an expression that is invariant in the current function, in the
2817 sense that it can be evaluated anywhere in the function and any number of
2818 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2819
2820 tree
2821 gnat_invariant_expr (tree expr)
2822 {
2823 const tree type = TREE_TYPE (expr);
2824
2825 expr = remove_conversions (expr, false);
2826
2827 /* Look through temporaries created to capture values. */
2828 while ((TREE_CODE (expr) == CONST_DECL
2829 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2830 && decl_function_context (expr) == current_function_decl
2831 && DECL_INITIAL (expr))
2832 {
2833 expr = DECL_INITIAL (expr);
2834 /* Look into CONSTRUCTORs built to initialize padded types. */
2835 if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2836 expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2837 expr = remove_conversions (expr, false);
2838 }
2839
2840 /* We are only interested in scalar types at the moment and, even if we may
2841 have gone through padding types in the above loop, we must be back to a
2842 scalar value at this point. */
2843 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2844 return NULL_TREE;
2845
2846 if (TREE_CONSTANT (expr))
2847 return fold_convert (type, expr);
2848
2849 /* Skip overflow checks since they don't change the invariantness. */
2850 if (TREE_CODE (expr) == COND_EXPR
2851 && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
2852 && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
2853 && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
2854 == gnat_raise_decls[CE_Overflow_Check_Failed])
2855 expr = COND_EXPR_ELSE (expr);
2856
2857 /* Deal with addition or subtraction of constants. */
2858 if (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR)
2859 {
2860 tree op0 = gnat_invariant_expr (TREE_OPERAND (expr, 0));
2861 tree op1 = TREE_OPERAND (expr, 1);
2862 if (op0 && TREE_CONSTANT (op1))
2863 return
2864 fold_build2 (TREE_CODE (expr), type,
2865 fold_convert (type, op0), fold_convert (type, op1));
2866 else
2867 return NULL_TREE;
2868 }
2869
2870 bool invariant_p = false;
2871 tree t = expr;
2872
2873 while (true)
2874 {
2875 switch (TREE_CODE (t))
2876 {
2877 case COMPONENT_REF:
2878 if (TREE_OPERAND (t, 2) != NULL_TREE)
2879 return NULL_TREE;
2880 invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
2881 break;
2882
2883 case ARRAY_REF:
2884 case ARRAY_RANGE_REF:
2885 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2886 || TREE_OPERAND (t, 2) != NULL_TREE
2887 || TREE_OPERAND (t, 3) != NULL_TREE)
2888 return NULL_TREE;
2889 break;
2890
2891 case BIT_FIELD_REF:
2892 case REALPART_EXPR:
2893 case IMAGPART_EXPR:
2894 case VIEW_CONVERT_EXPR:
2895 CASE_CONVERT:
2896 break;
2897
2898 case INDIRECT_REF:
2899 if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
2900 return NULL_TREE;
2901 invariant_p = false;
2902 break;
2903
2904 default:
2905 goto object;
2906 }
2907
2908 t = TREE_OPERAND (t, 0);
2909 }
2910
2911 object:
2912 if (TREE_SIDE_EFFECTS (t))
2913 return NULL_TREE;
2914
2915 if (TREE_CODE (t) == CONST_DECL
2916 && (DECL_EXTERNAL (t)
2917 || decl_function_context (t) != current_function_decl))
2918 return fold_convert (type, expr);
2919
2920 if (!invariant_p && !TREE_READONLY (t))
2921 return NULL_TREE;
2922
2923 if (TREE_CODE (t) == PARM_DECL)
2924 return fold_convert (type, expr);
2925
2926 if (TREE_CODE (t) == VAR_DECL
2927 && (DECL_EXTERNAL (t)
2928 || decl_function_context (t) != current_function_decl))
2929 return fold_convert (type, expr);
2930
2931 return NULL_TREE;
2932 }
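
/* A minimal usage sketch: only hoist a loop bound out of the loop if it is
   invariant in the current function (GNU_HIGH is a hypothetical tree).  */
#if 0
  tree gnu_invariant = gnat_invariant_expr (gnu_high);
  if (gnu_invariant)
    gnu_high = gnu_invariant;
#endif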