1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2015, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "vec.h"
31 #include "alias.h"
32 #include "tree.h"
33 #include "inchash.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stringpool.h"
37 #include "varasm.h"
38 #include "flags.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "tree-inline.h"
42
43 #include "ada.h"
44 #include "types.h"
45 #include "atree.h"
46 #include "elists.h"
47 #include "namet.h"
48 #include "nlists.h"
49 #include "snames.h"
50 #include "stringt.h"
51 #include "uintp.h"
52 #include "fe.h"
53 #include "sinfo.h"
54 #include "einfo.h"
55 #include "ada-tree.h"
56 #include "gigi.h"
57
58 /* Return the base type of TYPE. */
59
60 tree
61 get_base_type (tree type)
62 {
63 if (TREE_CODE (type) == RECORD_TYPE
64 && TYPE_JUSTIFIED_MODULAR_P (type))
65 type = TREE_TYPE (TYPE_FIELDS (type));
66
67 while (TREE_TYPE (type)
68 && (TREE_CODE (type) == INTEGER_TYPE
69 || TREE_CODE (type) == REAL_TYPE))
70 type = TREE_TYPE (type);
71
72 return type;
73 }
74 \f
75 /* EXP is a GCC tree representing an address. See if we can find how strictly
76 the object at this address is aligned and, if so, return the alignment of
77 the object in bits. Otherwise return 0. */
78
79 unsigned int
80 known_alignment (tree exp)
81 {
82 unsigned int this_alignment;
83 unsigned int lhs, rhs;
84
85 switch (TREE_CODE (exp))
86 {
87 CASE_CONVERT:
88 case VIEW_CONVERT_EXPR:
89 case NON_LVALUE_EXPR:
90 /* Conversions between pointers and integers don't change the alignment
91 of the underlying object. */
92 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
93 break;
94
95 case COMPOUND_EXPR:
96 /* The value of a COMPOUND_EXPR is that of its second operand. */
97 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
98 break;
99
100 case PLUS_EXPR:
101 case MINUS_EXPR:
102 /* If two addresses are added, the alignment of the result is the
103 minimum of the two alignments. */
104 lhs = known_alignment (TREE_OPERAND (exp, 0));
105 rhs = known_alignment (TREE_OPERAND (exp, 1));
106 this_alignment = MIN (lhs, rhs);
107 break;
108
109 case POINTER_PLUS_EXPR:
110 /* If this is the pattern built for aligning types, decode it. */
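      /* For instance, this pattern is typically BASE + ((-BASE) & (ALIGN - 1));
         with ALIGN = 16 the mask OP below is 15, ~15 has 16 as its lowest set
         bit, and a 16-byte alignment is therefore returned.  */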
111 if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
112 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
113 {
114 tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
115 return
116 known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
117 }
118
119 /* If we don't know the alignment of the offset, we assume that
120 of the base. */
121 lhs = known_alignment (TREE_OPERAND (exp, 0));
122 rhs = known_alignment (TREE_OPERAND (exp, 1));
123
124 if (rhs == 0)
125 this_alignment = lhs;
126 else
127 this_alignment = MIN (lhs, rhs);
128 break;
129
130 case COND_EXPR:
131 /* If there is a choice between two values, use the smaller one. */
132 lhs = known_alignment (TREE_OPERAND (exp, 1));
133 rhs = known_alignment (TREE_OPERAND (exp, 2));
134 this_alignment = MIN (lhs, rhs);
135 break;
136
137 case INTEGER_CST:
138 {
139 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
140 /* The first part of this represents the lowest bit in the constant,
141 but it is originally in bytes, not bits. */
142 this_alignment = (c & -c) * BITS_PER_UNIT;
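	/* For example, a constant byte offset of 24 = 0b11000 gives
	   c & -c = 8, i.e. the address is known to be 8-byte aligned,
	   or 64 bits when BITS_PER_UNIT is 8.  */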
143 }
144 break;
145
146 case MULT_EXPR:
147 /* If we know the alignment of just one side, use it. Otherwise,
148 use the product of the alignments. */
149 lhs = known_alignment (TREE_OPERAND (exp, 0));
150 rhs = known_alignment (TREE_OPERAND (exp, 1));
151
152 if (lhs == 0)
153 this_alignment = rhs;
154 else if (rhs == 0)
155 this_alignment = lhs;
156 else
157 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
158 break;
159
160 case BIT_AND_EXPR:
161 /* A bit-and expression is as aligned as the maximum alignment of the
162 operands. We typically get here for a complex lhs and a constant
163 negative power of two on the rhs to force an explicit alignment, so
164 don't bother looking at the lhs. */
165 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
166 break;
167
168 case ADDR_EXPR:
169 this_alignment = expr_align (TREE_OPERAND (exp, 0));
170 break;
171
172 case CALL_EXPR:
173 {
174 tree t = maybe_inline_call_in_expr (exp);
175 if (t)
176 return known_alignment (t);
177 }
178
179 /* ... fall through ... */
180
181 default:
182 /* For other pointer expressions, we assume that the pointed-to object
183 is at least as aligned as the pointed-to type. Beware that we can
184 have a dummy type here (e.g. a Taft Amendment type), for which the
185 alignment is meaningless and should be ignored. */
186 if (POINTER_TYPE_P (TREE_TYPE (exp))
187 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
188 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
189 else
190 this_alignment = 0;
191 break;
192 }
193
194 return this_alignment;
195 }
196 \f
197 /* We have a comparison or assignment operation on two types, T1 and T2, which
198 are either both array types or both record types. T1 is assumed to be for
199 the left hand side operand, and T2 for the right hand side. Return the
200 type that both operands should be converted to for the operation, if any.
201 Otherwise return zero. */
202
203 static tree
204 find_common_type (tree t1, tree t2)
205 {
206 /* ??? As of today, various constructs lead here with types of different
207 sizes even when both are constant (e.g. tagged types, packable vs regular
208 component types, padded vs unpadded types, ...). While some of these
209 would better be handled upstream (types should be made consistent before
210 calling into build_binary_op), some others are really expected and we
211 have to be careful. */
212
213 /* We must avoid writing more than what the target can hold if this is for
214 an assignment (the case of tagged types is handled in build_binary_op),
215 so we use the lhs type if it is known to be smaller or of constant size
216 and the rhs type is not, whatever the modes. We also force t1 in case of
217 constant size equality to minimize occurrences of view conversions on the
218 lhs of an assignment, except for the case of record types with a variant
219 part on the lhs but not on the rhs, to make the conversion simpler. */
220 if (TREE_CONSTANT (TYPE_SIZE (t1))
221 && (!TREE_CONSTANT (TYPE_SIZE (t2))
222 || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
223 || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
224 && !(TREE_CODE (t1) == RECORD_TYPE
225 && TREE_CODE (t2) == RECORD_TYPE
226 && get_variant_part (t1) != NULL_TREE
227 && get_variant_part (t2) == NULL_TREE))))
228 return t1;
229
230 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
231 that we will not have any alignment problems since, if we did, the
232 non-BLKmode type could not have been used. */
233 if (TYPE_MODE (t1) != BLKmode)
234 return t1;
235
236 /* If the rhs type is of constant size, use it whatever the modes. At
237 this point it is known to be smaller, or of constant size and the
238 lhs type is not. */
239 if (TREE_CONSTANT (TYPE_SIZE (t2)))
240 return t2;
241
242 /* Otherwise, if the rhs type is non-BLKmode, use it. */
243 if (TYPE_MODE (t2) != BLKmode)
244 return t2;
245
246 /* In this case, both types have variable size and BLKmode. It's
247 probably best to leave the "type mismatch" because changing it
248 could cause a bad self-referential reference. */
249 return NULL_TREE;
250 }
251 \f
252 /* Return an expression tree representing an equality comparison of A1 and A2,
253 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
254
255 Two arrays are equal in one of two ways: (1) if both have zero length in
256 some dimension (not necessarily the same dimension) or (2) if the lengths
257 in each dimension are equal and the data is equal. We perform the length
258 tests in as efficient a manner as possible. */
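/* For instance, in Ada the slices S (1 .. 0) and S (5 .. 4) are equal because
   both are null, and S (1 .. 3) = S (11 .. 13) whenever the three characters
   match, the bounds themselves being irrelevant.  */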
259
260 static tree
261 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
262 {
263 tree result = convert (result_type, boolean_true_node);
264 tree a1_is_null = convert (result_type, boolean_false_node);
265 tree a2_is_null = convert (result_type, boolean_false_node);
266 tree t1 = TREE_TYPE (a1);
267 tree t2 = TREE_TYPE (a2);
268 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
269 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
270 bool length_zero_p = false;
271
272 /* If the operands have side-effects, they need to be evaluated only once
273 in spite of the multiple references in the comparison. */
274 if (a1_side_effects_p)
275 a1 = gnat_protect_expr (a1);
276
277 if (a2_side_effects_p)
278 a2 = gnat_protect_expr (a2);
279
280 /* Process each dimension separately and compare the lengths. If any
281 dimension has a length known to be zero, set LENGTH_ZERO_P to true
282 in order to suppress the comparison of the data at the end. */
283 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
284 {
285 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
286 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
287 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
288 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
289 tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
290 size_one_node);
291 tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
292 size_one_node);
293 tree comparison, this_a1_is_null, this_a2_is_null;
294
295 /* If the length of the first array is a constant, swap our operands
296 unless the length of the second array is the constant zero. */
297 if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
298 {
299 tree tem;
300 bool btem;
301
302 tem = a1, a1 = a2, a2 = tem;
303 tem = t1, t1 = t2, t2 = tem;
304 tem = lb1, lb1 = lb2, lb2 = tem;
305 tem = ub1, ub1 = ub2, ub2 = tem;
306 tem = length1, length1 = length2, length2 = tem;
307 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
308 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
309 a2_side_effects_p = btem;
310 }
311
312 /* If the length of the second array is the constant zero, we can just
313 use the original stored bounds for the first array and see whether
314 last < first holds. */
315 if (integer_zerop (length2))
316 {
317 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
318
319 length_zero_p = true;
320
321 ub1
322 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
323 lb1
324 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
325
326 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
327 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
328 if (EXPR_P (comparison))
329 SET_EXPR_LOCATION (comparison, loc);
330
331 this_a1_is_null = comparison;
332 this_a2_is_null = convert (result_type, boolean_true_node);
333 }
334
335 /* Otherwise, if the length is some other constant value, we know that
336 this dimension in the second array cannot be superflat, so we can
337 just use its length computed from the actual stored bounds. */
338 else if (TREE_CODE (length2) == INTEGER_CST)
339 {
340 tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
341
342 ub1
343 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
344 lb1
345 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
346 /* Note that we know that UB2 and LB2 are constant and hence
347 cannot contain a PLACEHOLDER_EXPR. */
348 ub2
349 = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
350 lb2
351 = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
352
353 comparison
354 = fold_build2_loc (loc, EQ_EXPR, result_type,
355 build_binary_op (MINUS_EXPR, b, ub1, lb1),
356 build_binary_op (MINUS_EXPR, b, ub2, lb2));
357 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
358 if (EXPR_P (comparison))
359 SET_EXPR_LOCATION (comparison, loc);
360
361 this_a1_is_null
362 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
363
364 this_a2_is_null = convert (result_type, boolean_false_node);
365 }
366
367 /* Otherwise, compare the computed lengths. */
368 else
369 {
370 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
371 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
372
373 comparison
374 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
375
376 /* If the length expression is of the form (cond ? val : 0), assume
377 that cond is equivalent to (length != 0). That's guaranteed by
378 construction of the array types in gnat_to_gnu_entity. */
379 if (TREE_CODE (length1) == COND_EXPR
380 && integer_zerop (TREE_OPERAND (length1, 2)))
381 this_a1_is_null
382 = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
383 else
384 this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
385 length1, size_zero_node);
386
387 /* Likewise for the second array. */
388 if (TREE_CODE (length2) == COND_EXPR
389 && integer_zerop (TREE_OPERAND (length2, 2)))
390 this_a2_is_null
391 = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
392 else
393 this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
394 length2, size_zero_node);
395 }
396
397 /* Append expressions for this dimension to the final expressions. */
398 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
399 result, comparison);
400
401 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
402 this_a1_is_null, a1_is_null);
403
404 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
405 this_a2_is_null, a2_is_null);
406
407 t1 = TREE_TYPE (t1);
408 t2 = TREE_TYPE (t2);
409 }
410
411 /* Unless the length of some dimension is known to be zero, compare the
412 data in the array. */
413 if (!length_zero_p)
414 {
415 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
416 tree comparison;
417
418 if (type)
419 {
420 a1 = convert (type, a1),
421 a2 = convert (type, a2);
422 }
423
424 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
425
426 result
427 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
428 }
429
430 /* The result is also true if both sizes are zero. */
431 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
432 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
433 a1_is_null, a2_is_null),
434 result);
435
436 /* If the operands have side-effects, they need to be evaluated before
437 doing the tests above, since the place where they would otherwise end
438 up being evaluated at run time could be wrong. */
439 if (a1_side_effects_p)
440 result = build2 (COMPOUND_EXPR, result_type, a1, result);
441
442 if (a2_side_effects_p)
443 result = build2 (COMPOUND_EXPR, result_type, a2, result);
444
445 return result;
446 }
447
448 /* Return an expression tree representing an equality comparison of P1 and P2,
449 two objects of fat pointer type. The result should be of type RESULT_TYPE.
450
451 Two fat pointers are equal in one of two ways: (1) if both have a null
452 pointer to the array or (2) if they contain the same pair of pointers.
453 We perform the comparison in as efficient a manner as possible. */
454
455 static tree
456 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
457 {
458 tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
459 tree p1_array_is_null, p2_array_is_null;
460
461 /* If either operand has side-effects, it has to be evaluated only once
462 in spite of the multiple references to it in the comparison. */
463 p1 = gnat_protect_expr (p1);
464 p2 = gnat_protect_expr (p2);
465
466 /* The constant folder doesn't fold fat pointer types so we do it here. */
467 if (TREE_CODE (p1) == CONSTRUCTOR)
468 p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
469 else
470 p1_array = build_component_ref (p1, NULL_TREE,
471 TYPE_FIELDS (TREE_TYPE (p1)), true);
472
473 p1_array_is_null
474 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
475 fold_convert_loc (loc, TREE_TYPE (p1_array),
476 null_pointer_node));
477
478 if (TREE_CODE (p2) == CONSTRUCTOR)
479 p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
480 else
481 p2_array = build_component_ref (p2, NULL_TREE,
482 TYPE_FIELDS (TREE_TYPE (p2)), true);
483
484 p2_array_is_null
485 = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
486 fold_convert_loc (loc, TREE_TYPE (p2_array),
487 null_pointer_node));
488
489 /* If one of the pointers to the array is null, just compare the other. */
490 if (integer_zerop (p1_array))
491 return p2_array_is_null;
492 else if (integer_zerop (p2_array))
493 return p1_array_is_null;
494
495 /* Otherwise, do the fully-fledged comparison. */
496 same_array
497 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
498
499 if (TREE_CODE (p1) == CONSTRUCTOR)
500 p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
501 else
502 p1_bounds
503 = build_component_ref (p1, NULL_TREE,
504 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);
505
506 if (TREE_CODE (p2) == CONSTRUCTOR)
507 p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
508 else
509 p2_bounds
510 = build_component_ref (p2, NULL_TREE,
511 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);
512
513 same_bounds
514 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
515
516 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
517 return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
518 build_binary_op (TRUTH_ORIF_EXPR, result_type,
519 p1_array_is_null, same_bounds));
520 }
521 \f
522 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
523 type TYPE. We know that TYPE is a modular type with a nonbinary
524 modulus. */
525
526 static tree
527 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
528 tree rhs)
529 {
530 tree modulus = TYPE_MODULUS (type);
531 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
532 unsigned int precision;
533 bool unsignedp = true;
534 tree op_type = type;
535 tree result;
536
537 /* If this is an addition of a constant, convert it to a subtraction
538 of a constant since we can do that faster. */
539 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
540 {
541 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
542 op_code = MINUS_EXPR;
543 }
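  /* For example, with a modulus of 10, LHS + 7 becomes LHS - 3; the
     subtraction handling below adds the modulus back whenever the
     intermediate result goes negative, e.g. 2 + 7 -> 2 - 3 = -1 -> 9.  */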
544
545 /* For the logical operations, we only need PRECISION bits. For
546 addition and subtraction, we need one more and for multiplication we
547 need twice as many. But we never want to make a size smaller than
548 our size. */
549 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
550 needed_precision += 1;
551 else if (op_code == MULT_EXPR)
552 needed_precision *= 2;
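  /* E.g. for a modulus of 10, NEEDED_PRECISION starts at 4; the largest
     product of two in-range values is 9 * 9 = 81, which fits in the
     doubled precision of 8 bits.  */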
553
554 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
555
556 /* Unsigned will do for everything but subtraction. */
557 if (op_code == MINUS_EXPR)
558 unsignedp = false;
559
560 /* If our type is the wrong signedness or isn't wide enough, make a new
561 type and convert both our operands to it. */
562 if (TYPE_PRECISION (op_type) < precision
563 || TYPE_UNSIGNED (op_type) != unsignedp)
564 {
565 /* Copy the node so we ensure it can be modified to make it modular. */
566 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
567 modulus = convert (op_type, modulus);
568 SET_TYPE_MODULUS (op_type, modulus);
569 TYPE_MODULAR_P (op_type) = 1;
570 lhs = convert (op_type, lhs);
571 rhs = convert (op_type, rhs);
572 }
573
574 /* Do the operation, then we'll fix it up. */
575 result = fold_build2 (op_code, op_type, lhs, rhs);
576
577 /* For multiplication, we have no choice but to do a full modulus
578 operation. However, we want to do this in the narrowest
579 possible size. */
580 if (op_code == MULT_EXPR)
581 {
582 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
583 modulus = convert (div_type, modulus);
584 SET_TYPE_MODULUS (div_type, modulus);
585 TYPE_MODULAR_P (div_type) = 1;
586 result = convert (op_type,
587 fold_build2 (TRUNC_MOD_EXPR, div_type,
588 convert (div_type, result), modulus));
589 }
590
591 /* For subtraction, add the modulus back if we are negative. */
592 else if (op_code == MINUS_EXPR)
593 {
594 result = gnat_protect_expr (result);
595 result = fold_build3 (COND_EXPR, op_type,
596 fold_build2 (LT_EXPR, boolean_type_node, result,
597 convert (op_type, integer_zero_node)),
598 fold_build2 (PLUS_EXPR, op_type, result, modulus),
599 result);
600 }
601
602 /* For the other operations, subtract the modulus if we are >= it. */
603 else
604 {
605 result = gnat_protect_expr (result);
606 result = fold_build3 (COND_EXPR, op_type,
607 fold_build2 (GE_EXPR, boolean_type_node,
608 result, modulus),
609 fold_build2 (MINUS_EXPR, op_type,
610 result, modulus),
611 result);
612 }
613
614 return convert (type, result);
615 }
616 \f
617 /* This page contains routines that implement the Ada semantics with regard
618 to atomic objects. They are fully piggybacked on the middle-end support
619 for atomic loads and stores.
620
621 *** Memory barriers and volatile objects ***
622
623 We implement the weakened form of the C.6(16) clause that was introduced
624 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
625 implementable without significant performance hits on modern platforms.
626
627 We also take advantage of the requirements imposed on shared variables by
628 9.10 (conditions for sequential actions) to have non-erroneous execution
629 and consider that C.6(16) and C.6(17) only prescribe a uniform order of
630 volatile updates with regard to sequential actions, i.e. with regard to
631 reads or updates of atomic objects.
632
633 As such, an update of an atomic object by a task requires that all earlier
634 accesses to volatile objects have completed. Similarly, later accesses to
635 volatile objects cannot be reordered before the update of the atomic object.
636 So, memory barriers both before and after the atomic update are needed.
637
638 For a read of an atomic object, to avoid seeing writes of volatile objects
639 by a task earlier than by the other tasks, a memory barrier is needed before
640 the atomic read. Finally, to avoid reordering later reads or updates of
641 volatile objects to before the atomic read, a barrier is needed after the
642 atomic read.
643
644 So, memory barriers are needed before and after atomic reads and updates.
645 And, in order to simplify the implementation, we use full memory barriers
646 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
647
648 /* Return the size of TYPE, which must be a positive power of 2. */
649
650 static unsigned int
651 resolve_atomic_size (tree type)
652 {
653 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
654
655 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
656 return size;
657
658 /* We shouldn't reach here without having already detected that the size
659 isn't compatible with an atomic access. */
660 gcc_assert (Serious_Errors_Detected);
661
662 return 0;
663 }
664
665 /* Build an atomic load for the underlying atomic object in SRC. SYNC is
666 true if the load requires synchronization. */
667
668 tree
669 build_atomic_load (tree src, bool sync)
670 {
671 tree ptr_type
672 = build_pointer_type
673 (build_qualified_type (void_type_node,
674 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
675 tree mem_model
676 = build_int_cst (integer_type_node,
677 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
678 tree orig_src = src;
679 tree t, addr, val;
680 unsigned int size;
681 int fncode;
682
683 /* Remove conversions to get the address of the underlying object. */
684 src = remove_conversions (src, false);
685 size = resolve_atomic_size (TREE_TYPE (src));
686 if (size == 0)
687 return orig_src;
688
689 fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
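  /* The sized variants follow BUILT_IN_ATOMIC_LOAD_N consecutively, so e.g.
     a 4-byte object yields exact_log2 (4) + 1 = 3 and thus selects
     BUILT_IN_ATOMIC_LOAD_4.  */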
690 t = builtin_decl_implicit ((enum built_in_function) fncode);
691
692 addr = build_unary_op (ADDR_EXPR, ptr_type, src);
693 val = build_call_expr (t, 2, addr, mem_model);
694
695 /* First reinterpret the loaded bits in the original type of the load,
696 then convert to the expected result type. */
697 t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
698 return convert (TREE_TYPE (orig_src), t);
699 }
700
701 /* Build an atomic store from SRC to the underlying atomic object in DEST.
702 SYNC is true if the store requires synchronization. */
703
704 tree
705 build_atomic_store (tree dest, tree src, bool sync)
706 {
707 tree ptr_type
708 = build_pointer_type
709 (build_qualified_type (void_type_node,
710 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
711 tree mem_model
712 = build_int_cst (integer_type_node,
713 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
714 tree orig_dest = dest;
715 tree t, int_type, addr;
716 unsigned int size;
717 int fncode;
718
719 /* Remove conversions to get the address of the underlying object. */
720 dest = remove_conversions (dest, false);
721 size = resolve_atomic_size (TREE_TYPE (dest));
722 if (size == 0)
723 return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
724
725 fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
726 t = builtin_decl_implicit ((enum built_in_function) fncode);
727 int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
728
729 /* First convert the bits to be stored to the original type of the store,
730 then reinterpret them in the effective type. But if the original type
731 is a padded type with the same size, convert to the inner type instead,
732 as we don't want to artificially introduce a CONSTRUCTOR here. */
733 if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
734 && TYPE_SIZE (TREE_TYPE (dest))
735 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
736 src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
737 else
738 src = convert (TREE_TYPE (dest), src);
739 src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
740 addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
741
742 return build_call_expr (t, 3, addr, src, mem_model);
743 }
744
745 /* Build a load-modify-store sequence from SRC to DEST. GNAT_NODE is used for
746 the location of the sequence. Note that, even though the load and the store
747 are both atomic, the sequence itself is not atomic. */
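/* Roughly, when DEST is a component of an object OBJ whose value was fetched
   with an atomic load, the sequence built below amounts to
   TMP := atomic_load (OBJ);  TMP.<component> := SRC;  atomic_store (OBJ, TMP).  */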
748
749 tree
750 build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
751 {
752 /* We will be modifying DEST below so we build a copy. */
753 dest = copy_node (dest);
754 tree ref = dest;
755
756 while (handled_component_p (ref))
757 {
758 /* The load should already have been generated during the translation
759 of the GNAT destination tree; find it in the GNU tree. */
760 if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
761 {
762 tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
763 if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
764 {
765 tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
766 tree t = CALL_EXPR_ARG (op, 0);
767 tree obj, temp, stmt;
768
769 /* Find out the loaded object. */
770 if (TREE_CODE (t) == NOP_EXPR)
771 t = TREE_OPERAND (t, 0);
772 if (TREE_CODE (t) == ADDR_EXPR)
773 obj = TREE_OPERAND (t, 0);
774 else
775 obj = build1 (INDIRECT_REF, type, t);
776
777 /* Drop atomic and volatile qualifiers for the temporary. */
778 type = TYPE_MAIN_VARIANT (type);
779
780 /* And drop BLKmode, if need be, to put it into a register. */
781 if (TYPE_MODE (type) == BLKmode)
782 {
783 unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
784 type = copy_type (type);
785 SET_TYPE_MODE (type, mode_for_size (size, MODE_INT, 0));
786 }
787
788 /* Create the temporary by inserting a SAVE_EXPR. */
789 temp = build1 (SAVE_EXPR, type,
790 build1 (VIEW_CONVERT_EXPR, type, op));
791 TREE_OPERAND (ref, 0) = temp;
792
793 start_stmt_group ();
794
795 /* Build the modify of the temporary. */
796 stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
797 add_stmt_with_node (stmt, gnat_node);
798
799 /* Build the store to the object. */
800 stmt = build_atomic_store (obj, temp, false);
801 add_stmt_with_node (stmt, gnat_node);
802
803 return end_stmt_group ();
804 }
805 }
806
807 TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
808 ref = TREE_OPERAND (ref, 0);
809 }
810
811 /* Something went wrong earlier if we have not found the atomic load. */
812 gcc_unreachable ();
813 }
814 \f
815 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
816 desired for the result. Usually the operation is to be performed
817 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
818 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
819 case the type to be used will be derived from the operands.
820
821 This function is very much unlike the ones for C and C++ since we
822 have already done any type conversion and matching required. All we
823 have to do here is validate the work done by SEM and handle subtypes. */
824
825 tree
826 build_binary_op (enum tree_code op_code, tree result_type,
827 tree left_operand, tree right_operand)
828 {
829 tree left_type = TREE_TYPE (left_operand);
830 tree right_type = TREE_TYPE (right_operand);
831 tree left_base_type = get_base_type (left_type);
832 tree right_base_type = get_base_type (right_type);
833 tree operation_type = result_type;
834 tree best_type = NULL_TREE;
835 tree modulus, result;
836 bool has_side_effects = false;
837
838 if (operation_type
839 && TREE_CODE (operation_type) == RECORD_TYPE
840 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
841 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
842
843 if (operation_type
844 && TREE_CODE (operation_type) == INTEGER_TYPE
845 && TYPE_EXTRA_SUBTYPE_P (operation_type))
846 operation_type = get_base_type (operation_type);
847
848 modulus = (operation_type
849 && TREE_CODE (operation_type) == INTEGER_TYPE
850 && TYPE_MODULAR_P (operation_type)
851 ? TYPE_MODULUS (operation_type) : NULL_TREE);
852
853 switch (op_code)
854 {
855 case INIT_EXPR:
856 case MODIFY_EXPR:
857 gcc_checking_assert (result_type == NULL_TREE);
858
859 /* If there were integral or pointer conversions on the LHS, remove
860 them; we'll be putting them back below if needed. Likewise for
861 conversions between array and record types, except for justified
862 modular types. But don't do this if the right operand is not
863 BLKmode (for packed arrays) unless we are not changing the mode. */
864 while ((CONVERT_EXPR_P (left_operand)
865 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
866 && (((INTEGRAL_TYPE_P (left_type)
867 || POINTER_TYPE_P (left_type))
868 && (INTEGRAL_TYPE_P (TREE_TYPE
869 (TREE_OPERAND (left_operand, 0)))
870 || POINTER_TYPE_P (TREE_TYPE
871 (TREE_OPERAND (left_operand, 0)))))
872 || (((TREE_CODE (left_type) == RECORD_TYPE
873 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
874 || TREE_CODE (left_type) == ARRAY_TYPE)
875 && ((TREE_CODE (TREE_TYPE
876 (TREE_OPERAND (left_operand, 0)))
877 == RECORD_TYPE)
878 || (TREE_CODE (TREE_TYPE
879 (TREE_OPERAND (left_operand, 0)))
880 == ARRAY_TYPE))
881 && (TYPE_MODE (right_type) == BLKmode
882 || (TYPE_MODE (left_type)
883 == TYPE_MODE (TREE_TYPE
884 (TREE_OPERAND
885 (left_operand, 0))))))))
886 {
887 left_operand = TREE_OPERAND (left_operand, 0);
888 left_type = TREE_TYPE (left_operand);
889 }
890
891 /* If a class-wide type may be involved, force use of the RHS type. */
892 if ((TREE_CODE (right_type) == RECORD_TYPE
893 || TREE_CODE (right_type) == UNION_TYPE)
894 && TYPE_ALIGN_OK (right_type))
895 operation_type = right_type;
896
897 /* If we are copying between padded objects with compatible types, use
898 the padded view of the objects; this is very likely more efficient.
899 Likewise for a padded object that is assigned a constructor, if we
900 can convert the constructor to the inner type, to avoid putting a
901 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
902 actually copied anything. */
903 else if (TYPE_IS_PADDING_P (left_type)
904 && TREE_CONSTANT (TYPE_SIZE (left_type))
905 && ((TREE_CODE (right_operand) == COMPONENT_REF
906 && TYPE_MAIN_VARIANT (left_type)
907 == TYPE_MAIN_VARIANT
908 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
909 || (TREE_CODE (right_operand) == CONSTRUCTOR
910 && !CONTAINS_PLACEHOLDER_P
911 (DECL_SIZE (TYPE_FIELDS (left_type)))))
912 && !integer_zerop (TYPE_SIZE (right_type)))
913 {
914 /* We make an exception for a BLKmode type padding a non-BLKmode
915 inner type and do the conversion of the LHS right away, since
916 unchecked_convert wouldn't do it properly. */
917 if (TYPE_MODE (left_type) == BLKmode
918 && TYPE_MODE (right_type) != BLKmode
919 && TREE_CODE (right_operand) != CONSTRUCTOR)
920 {
921 operation_type = right_type;
922 left_operand = convert (operation_type, left_operand);
923 left_type = operation_type;
924 }
925 else
926 operation_type = left_type;
927 }
928
929 /* If we have a call to a function that returns with variable size, use
930 the RHS type in case we want to use the return slot optimization. */
931 else if (TREE_CODE (right_operand) == CALL_EXPR
932 && return_type_with_variable_size_p (right_type))
933 operation_type = right_type;
934
935 /* Find the best type to use for copying between aggregate types. */
936 else if (((TREE_CODE (left_type) == ARRAY_TYPE
937 && TREE_CODE (right_type) == ARRAY_TYPE)
938 || (TREE_CODE (left_type) == RECORD_TYPE
939 && TREE_CODE (right_type) == RECORD_TYPE))
940 && (best_type = find_common_type (left_type, right_type)))
941 operation_type = best_type;
942
943 /* Otherwise use the LHS type. */
944 else
945 operation_type = left_type;
946
947 /* Ensure everything on the LHS is valid. If we have a field reference,
948 strip anything that get_inner_reference can handle. Then remove any
949 conversions between types having the same code and mode. And mark
950 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
951 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node. */
952 result = left_operand;
953 while (true)
954 {
955 tree restype = TREE_TYPE (result);
956
957 if (TREE_CODE (result) == COMPONENT_REF
958 || TREE_CODE (result) == ARRAY_REF
959 || TREE_CODE (result) == ARRAY_RANGE_REF)
960 while (handled_component_p (result))
961 result = TREE_OPERAND (result, 0);
962 else if (TREE_CODE (result) == REALPART_EXPR
963 || TREE_CODE (result) == IMAGPART_EXPR
964 || (CONVERT_EXPR_P (result)
965 && (((TREE_CODE (restype)
966 == TREE_CODE (TREE_TYPE
967 (TREE_OPERAND (result, 0))))
968 && (TYPE_MODE (TREE_TYPE
969 (TREE_OPERAND (result, 0)))
970 == TYPE_MODE (restype)))
971 || TYPE_ALIGN_OK (restype))))
972 result = TREE_OPERAND (result, 0);
973 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
974 {
975 TREE_ADDRESSABLE (result) = 1;
976 result = TREE_OPERAND (result, 0);
977 }
978 else
979 break;
980 }
981
982 gcc_assert (TREE_CODE (result) == INDIRECT_REF
983 || TREE_CODE (result) == NULL_EXPR
984 || TREE_CODE (result) == SAVE_EXPR
985 || DECL_P (result));
986
987 /* Convert the right operand to the operation type unless it is
988 either already of the correct type or if the type involves a
989 placeholder, since the RHS may not have the same record type. */
990 if (operation_type != right_type
991 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
992 {
993 right_operand = convert (operation_type, right_operand);
994 right_type = operation_type;
995 }
996
997 /* If the left operand is not of the same type as the operation
998 type, wrap it up in a VIEW_CONVERT_EXPR. */
999 if (left_type != operation_type)
1000 left_operand = unchecked_convert (operation_type, left_operand, false);
1001
1002 has_side_effects = true;
1003 modulus = NULL_TREE;
1004 break;
1005
1006 case ARRAY_REF:
1007 if (!operation_type)
1008 operation_type = TREE_TYPE (left_type);
1009
1010 /* ... fall through ... */
1011
1012 case ARRAY_RANGE_REF:
1013 /* First look through conversion between type variants. Note that
1014 this changes neither the operation type nor the type domain. */
1015 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
1016 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
1017 == TYPE_MAIN_VARIANT (left_type))
1018 {
1019 left_operand = TREE_OPERAND (left_operand, 0);
1020 left_type = TREE_TYPE (left_operand);
1021 }
1022
1023 /* For a range, make sure the element type is consistent. */
1024 if (op_code == ARRAY_RANGE_REF
1025 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
1026 operation_type = build_array_type (TREE_TYPE (left_type),
1027 TYPE_DOMAIN (operation_type));
1028
1029 /* Then convert the right operand to its base type. This will prevent
1030 unneeded sign conversions when sizetype is wider than integer. */
1031 right_operand = convert (right_base_type, right_operand);
1032 right_operand = convert_to_index_type (right_operand);
1033 modulus = NULL_TREE;
1034 break;
1035
1036 case TRUTH_ANDIF_EXPR:
1037 case TRUTH_ORIF_EXPR:
1038 case TRUTH_AND_EXPR:
1039 case TRUTH_OR_EXPR:
1040 case TRUTH_XOR_EXPR:
1041 gcc_checking_assert
1042 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1043 operation_type = left_base_type;
1044 left_operand = convert (operation_type, left_operand);
1045 right_operand = convert (operation_type, right_operand);
1046 break;
1047
1048 case GE_EXPR:
1049 case LE_EXPR:
1050 case GT_EXPR:
1051 case LT_EXPR:
1052 case EQ_EXPR:
1053 case NE_EXPR:
1054 gcc_checking_assert
1055 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1056 /* If either operand is a NULL_EXPR, just return a new one. */
1057 if (TREE_CODE (left_operand) == NULL_EXPR)
1058 return build2 (op_code, result_type,
1059 build1 (NULL_EXPR, integer_type_node,
1060 TREE_OPERAND (left_operand, 0)),
1061 integer_zero_node);
1062
1063 else if (TREE_CODE (right_operand) == NULL_EXPR)
1064 return build2 (op_code, result_type,
1065 build1 (NULL_EXPR, integer_type_node,
1066 TREE_OPERAND (right_operand, 0)),
1067 integer_zero_node);
1068
1069 /* If either object is a justified modular type, get the
1070 fields from within. */
1071 if (TREE_CODE (left_type) == RECORD_TYPE
1072 && TYPE_JUSTIFIED_MODULAR_P (left_type))
1073 {
1074 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
1075 left_operand);
1076 left_type = TREE_TYPE (left_operand);
1077 left_base_type = get_base_type (left_type);
1078 }
1079
1080 if (TREE_CODE (right_type) == RECORD_TYPE
1081 && TYPE_JUSTIFIED_MODULAR_P (right_type))
1082 {
1083 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
1084 right_operand);
1085 right_type = TREE_TYPE (right_operand);
1086 right_base_type = get_base_type (right_type);
1087 }
1088
1089 /* If both objects are arrays, compare them specially. */
1090 if ((TREE_CODE (left_type) == ARRAY_TYPE
1091 || (TREE_CODE (left_type) == INTEGER_TYPE
1092 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
1093 && (TREE_CODE (right_type) == ARRAY_TYPE
1094 || (TREE_CODE (right_type) == INTEGER_TYPE
1095 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1096 {
1097 result = compare_arrays (input_location,
1098 result_type, left_operand, right_operand);
1099 if (op_code == NE_EXPR)
1100 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1101 else
1102 gcc_assert (op_code == EQ_EXPR);
1103
1104 return result;
1105 }
1106
1107 /* Otherwise, the base types must be the same, unless they are both fat
1108 pointer types or record types. In the latter case, use the best type
1109 and convert both operands to that type. */
1110 if (left_base_type != right_base_type)
1111 {
1112 if (TYPE_IS_FAT_POINTER_P (left_base_type)
1113 && TYPE_IS_FAT_POINTER_P (right_base_type))
1114 {
1115 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1116 == TYPE_MAIN_VARIANT (right_base_type));
1117 best_type = left_base_type;
1118 }
1119
1120 else if (TREE_CODE (left_base_type) == RECORD_TYPE
1121 && TREE_CODE (right_base_type) == RECORD_TYPE)
1122 {
1123 /* The only way this is permitted is if both types have the same
1124 name. In that case, one of them must not be self-referential.
1125 Use it as the best type. Even better with a fixed size. */
1126 gcc_assert (TYPE_NAME (left_base_type)
1127 && TYPE_NAME (left_base_type)
1128 == TYPE_NAME (right_base_type));
1129
1130 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1131 best_type = left_base_type;
1132 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1133 best_type = right_base_type;
1134 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1135 best_type = left_base_type;
1136 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1137 best_type = right_base_type;
1138 else
1139 gcc_unreachable ();
1140 }
1141
1142 else if (POINTER_TYPE_P (left_base_type)
1143 && POINTER_TYPE_P (right_base_type))
1144 {
1145 gcc_assert (TREE_TYPE (left_base_type)
1146 == TREE_TYPE (right_base_type));
1147 best_type = left_base_type;
1148 }
1149 else
1150 gcc_unreachable ();
1151
1152 left_operand = convert (best_type, left_operand);
1153 right_operand = convert (best_type, right_operand);
1154 }
1155 else
1156 {
1157 left_operand = convert (left_base_type, left_operand);
1158 right_operand = convert (right_base_type, right_operand);
1159 }
1160
1161 /* If both objects are fat pointers, compare them specially. */
1162 if (TYPE_IS_FAT_POINTER_P (left_base_type))
1163 {
1164 result
1165 = compare_fat_pointers (input_location,
1166 result_type, left_operand, right_operand);
1167 if (op_code == NE_EXPR)
1168 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1169 else
1170 gcc_assert (op_code == EQ_EXPR);
1171
1172 return result;
1173 }
1174
1175 modulus = NULL_TREE;
1176 break;
1177
1178 case LSHIFT_EXPR:
1179 case RSHIFT_EXPR:
1180 case LROTATE_EXPR:
1181 case RROTATE_EXPR:
1182 /* The RHS of a shift can be any type. Also, ignore any modulus
1183 (we used to abort, but this is needed for unchecked conversion
1184 to modular types). Otherwise, processing is the same as normal. */
1185 gcc_assert (operation_type == left_base_type);
1186 modulus = NULL_TREE;
1187 left_operand = convert (operation_type, left_operand);
1188 break;
1189
1190 case BIT_AND_EXPR:
1191 case BIT_IOR_EXPR:
1192 case BIT_XOR_EXPR:
1193 /* For binary modulus, if the inputs are in range, so are the
1194 outputs. */
1195 if (modulus && integer_pow2p (modulus))
1196 modulus = NULL_TREE;
1197 goto common;
1198
1199 case COMPLEX_EXPR:
1200 gcc_assert (TREE_TYPE (result_type) == left_base_type
1201 && TREE_TYPE (result_type) == right_base_type);
1202 left_operand = convert (left_base_type, left_operand);
1203 right_operand = convert (right_base_type, right_operand);
1204 break;
1205
1206 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1207 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1208 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1209 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1210 /* These always produce results lower than either operand. */
1211 modulus = NULL_TREE;
1212 goto common;
1213
1214 case POINTER_PLUS_EXPR:
1215 gcc_assert (operation_type == left_base_type
1216 && sizetype == right_base_type);
1217 left_operand = convert (operation_type, left_operand);
1218 right_operand = convert (sizetype, right_operand);
1219 break;
1220
1221 case PLUS_NOMOD_EXPR:
1222 case MINUS_NOMOD_EXPR:
1223 if (op_code == PLUS_NOMOD_EXPR)
1224 op_code = PLUS_EXPR;
1225 else
1226 op_code = MINUS_EXPR;
1227 modulus = NULL_TREE;
1228
1229 /* ... fall through ... */
1230
1231 case PLUS_EXPR:
1232 case MINUS_EXPR:
1233 /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE, as the
1234 other compilers do. Contrary to C, Ada doesn't allow arithmetic in
1235 these types but can generate addition/subtraction for Succ/Pred. */
1236 if (operation_type
1237 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1238 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1239 operation_type = left_base_type = right_base_type
1240 = gnat_type_for_mode (TYPE_MODE (operation_type),
1241 TYPE_UNSIGNED (operation_type));
1242
1243 /* ... fall through ... */
1244
1245 default:
1246 common:
1247 /* The result type should be the same as the base types of
1248 both operands (and they should be the same). Convert
1249 everything to the result type. */
1250
1251 gcc_assert (operation_type == left_base_type
1252 && left_base_type == right_base_type);
1253 left_operand = convert (operation_type, left_operand);
1254 right_operand = convert (operation_type, right_operand);
1255 }
1256
1257 if (modulus && !integer_pow2p (modulus))
1258 {
1259 result = nonbinary_modular_operation (op_code, operation_type,
1260 left_operand, right_operand);
1261 modulus = NULL_TREE;
1262 }
1263 /* If either operand is a NULL_EXPR, just return a new one. */
1264 else if (TREE_CODE (left_operand) == NULL_EXPR)
1265 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1266 else if (TREE_CODE (right_operand) == NULL_EXPR)
1267 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1268 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1269 result = fold (build4 (op_code, operation_type, left_operand,
1270 right_operand, NULL_TREE, NULL_TREE));
1271 else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1272 result = build2 (op_code, void_type_node, left_operand, right_operand);
1273 else
1274 result
1275 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1276
1277 if (TREE_CONSTANT (result))
1278 ;
1279 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1280 {
1281 if (TYPE_VOLATILE (operation_type))
1282 TREE_THIS_VOLATILE (result) = 1;
1283 }
1284 else
1285 TREE_CONSTANT (result)
1286 |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1287
1288 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1289
1290 /* If we are working with modular types, perform the MOD operation
1291 if something above hasn't eliminated the need for it. */
1292 if (modulus)
1293 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1294 convert (operation_type, modulus));
1295
1296 if (result_type && result_type != operation_type)
1297 result = convert (result_type, result);
1298
1299 return result;
1300 }
1301 \f
1302 /* Similar, but for unary operations. */
1303
1304 tree
1305 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1306 {
1307 tree type = TREE_TYPE (operand);
1308 tree base_type = get_base_type (type);
1309 tree operation_type = result_type;
1310 tree result;
1311
1312 if (operation_type
1313 && TREE_CODE (operation_type) == RECORD_TYPE
1314 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1315 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1316
1317 if (operation_type
1318 && TREE_CODE (operation_type) == INTEGER_TYPE
1319 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1320 operation_type = get_base_type (operation_type);
1321
1322 switch (op_code)
1323 {
1324 case REALPART_EXPR:
1325 case IMAGPART_EXPR:
1326 if (!operation_type)
1327 result_type = operation_type = TREE_TYPE (type);
1328 else
1329 gcc_assert (result_type == TREE_TYPE (type));
1330
1331 result = fold_build1 (op_code, operation_type, operand);
1332 break;
1333
1334 case TRUTH_NOT_EXPR:
1335 gcc_checking_assert
1336 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1337 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1338 /* When not optimizing, fold the result as invert_truthvalue_loc
1339 doesn't fold the result of comparisons. This is intended to undo
1340 the trick used for boolean rvalues in gnat_to_gnu. */
1341 if (!optimize)
1342 result = fold (result);
1343 break;
1344
1345 case ATTR_ADDR_EXPR:
1346 case ADDR_EXPR:
1347 switch (TREE_CODE (operand))
1348 {
1349 case INDIRECT_REF:
1350 case UNCONSTRAINED_ARRAY_REF:
1351 result = TREE_OPERAND (operand, 0);
1352
1353 /* Make sure the type here is a pointer, not a reference.
1354 GCC wants pointer types for function addresses. */
1355 if (!result_type)
1356 result_type = build_pointer_type (type);
1357
1358 /* If the underlying object can alias everything, propagate the
1359 property since we are effectively retrieving the object. */
1360 if (POINTER_TYPE_P (TREE_TYPE (result))
1361 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1362 {
1363 if (TREE_CODE (result_type) == POINTER_TYPE
1364 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1365 result_type
1366 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1367 TYPE_MODE (result_type),
1368 true);
1369 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1370 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1371 result_type
1372 = build_reference_type_for_mode (TREE_TYPE (result_type),
1373 TYPE_MODE (result_type),
1374 true);
1375 }
1376 break;
1377
1378 case NULL_EXPR:
1379 result = operand;
1380 TREE_TYPE (result) = type = build_pointer_type (type);
1381 break;
1382
1383 case COMPOUND_EXPR:
1384 /* Fold a compound expression if it has unconstrained array type
1385 since the middle-end cannot handle it. But we don't do it in the
1386 general case because it may introduce aliasing issues if the
1387 first operand is an indirect assignment and the second operand
1388 the corresponding address, e.g. for an allocator. */
1389 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
1390 {
1391 result = build_unary_op (ADDR_EXPR, result_type,
1392 TREE_OPERAND (operand, 1));
1393 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1394 TREE_OPERAND (operand, 0), result);
1395 break;
1396 }
1397 goto common;
1398
1399 case ARRAY_REF:
1400 case ARRAY_RANGE_REF:
1401 case COMPONENT_REF:
1402 case BIT_FIELD_REF:
1403 /* If this is for 'Address, find the address of the prefix and add
1404 the offset to the field. Otherwise, do this the normal way. */
1405 if (op_code == ATTR_ADDR_EXPR)
1406 {
1407 HOST_WIDE_INT bitsize;
1408 HOST_WIDE_INT bitpos;
1409 tree offset, inner;
1410 machine_mode mode;
1411 int unsignedp, volatilep;
1412
1413 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1414 &mode, &unsignedp, &volatilep,
1415 false);
1416
1417 /* If INNER is a padding type whose field has a self-referential
1418 size, convert to that inner type. We know the offset is zero
1419 and we need to have that type visible. */
1420 if (type_is_padding_self_referential (TREE_TYPE (inner)))
1421 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1422 inner);
1423
1424 /* Compute the offset as a byte offset from INNER. */
1425 if (!offset)
1426 offset = size_zero_node;
1427
1428 offset = size_binop (PLUS_EXPR, offset,
1429 size_int (bitpos / BITS_PER_UNIT));
1430
1431 /* Take the address of INNER, convert it to a pointer to our type
1432 and add the offset. */
1433 inner = build_unary_op (ADDR_EXPR,
1434 build_pointer_type (TREE_TYPE (operand)),
1435 inner);
1436 result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
1437 inner, offset);
1438 break;
1439 }
1440 goto common;
1441
1442 case CONSTRUCTOR:
1443 /* If this is just a constructor for a padded record, we can
1444 just take the address of the single field and convert it to
1445 a pointer to our type. */
1446 if (TYPE_IS_PADDING_P (type))
1447 {
1448 result
1449 = build_unary_op (ADDR_EXPR,
1450 build_pointer_type (TREE_TYPE (operand)),
1451 CONSTRUCTOR_ELT (operand, 0)->value);
1452 break;
1453 }
1454 goto common;
1455
1456 case NOP_EXPR:
1457 if (AGGREGATE_TYPE_P (type)
1458 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1459 return build_unary_op (ADDR_EXPR, result_type,
1460 TREE_OPERAND (operand, 0));
1461
1462 /* ... fallthru ... */
1463
1464 case VIEW_CONVERT_EXPR:
1465 /* If this is just a variant conversion or if the conversion doesn't
1466 change the mode, get the result type from this type and go down.
1467 This is needed for conversions of CONST_DECLs, to eventually get
1468 to the address of their CORRESPONDING_VARs. */
1469 if ((TYPE_MAIN_VARIANT (type)
1470 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1471 || (TYPE_MODE (type) != BLKmode
1472 && (TYPE_MODE (type)
1473 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1474 return build_unary_op (ADDR_EXPR,
1475 (result_type ? result_type
1476 : build_pointer_type (type)),
1477 TREE_OPERAND (operand, 0));
1478 goto common;
1479
1480 case CONST_DECL:
1481 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1482
1483 /* ... fall through ... */
1484
1485 default:
1486 common:
1487
1488 /* If we are taking the address of a padded record whose field
1489 contains a template, take the address of the field. */
1490 if (TYPE_IS_PADDING_P (type)
1491 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1492 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1493 {
1494 type = TREE_TYPE (TYPE_FIELDS (type));
1495 operand = convert (type, operand);
1496 }
1497
1498 gnat_mark_addressable (operand);
1499 result = build_fold_addr_expr (operand);
1500 }
1501
1502 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1503 break;
1504
1505 case INDIRECT_REF:
1506 {
1507 tree t = remove_conversions (operand, false);
1508 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1509
1510 /* If TYPE is a thin pointer, either first retrieve the base if this
1511 is an expression with an offset built for the initialization of an
1512 object with an unconstrained nominal subtype, or else convert to
1513 the fat pointer. */
1514 if (TYPE_IS_THIN_POINTER_P (type))
1515 {
1516 tree rec_type = TREE_TYPE (type);
1517
1518 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1519 && TREE_OPERAND (operand, 1)
1520 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1521 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1522 {
1523 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1524 type = TREE_TYPE (operand);
1525 }
1526 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1527 {
1528 operand
1529 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1530 operand);
1531 type = TREE_TYPE (operand);
1532 }
1533 }
1534
1535 /* If we want to refer to an unconstrained array, use the appropriate
1536 expression. But this will never survive down to the back-end. */
1537 if (TYPE_IS_FAT_POINTER_P (type))
1538 {
1539 result = build1 (UNCONSTRAINED_ARRAY_REF,
1540 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1541 TREE_READONLY (result)
1542 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1543 }
1544
1545 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1546 else if (TREE_CODE (operand) == ADDR_EXPR)
1547 result = TREE_OPERAND (operand, 0);
1548
1549 /* Otherwise, build and fold the indirect reference. */
1550 else
1551 {
1552 result = build_fold_indirect_ref (operand);
1553 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1554 }
1555
1556 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1557 {
1558 TREE_SIDE_EFFECTS (result) = 1;
1559 if (TREE_CODE (result) == INDIRECT_REF)
1560 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1561 }
1562
1563 if ((TREE_CODE (result) == INDIRECT_REF
1564 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1565 && can_never_be_null)
1566 TREE_THIS_NOTRAP (result) = 1;
1567
1568 break;
1569 }
1570
1571 case NEGATE_EXPR:
1572 case BIT_NOT_EXPR:
1573 {
1574 tree modulus = ((operation_type
1575 && TREE_CODE (operation_type) == INTEGER_TYPE
1576 && TYPE_MODULAR_P (operation_type))
1577 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1578 int mod_pow2 = modulus && integer_pow2p (modulus);
1579
1580 /* If this is a modular type, there are various possibilities
1581 depending on the operation and whether the modulus is a
1582 power of two or not. */
1583
1584 if (modulus)
1585 {
1586 gcc_assert (operation_type == base_type);
1587 operand = convert (operation_type, operand);
1588
1589 /* The fastest in the negate case for binary modulus is
1590 the straightforward code; the TRUNC_MOD_EXPR below
1591 is an AND operation. */
1592 if (op_code == NEGATE_EXPR && mod_pow2)
1593 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1594 fold_build1 (NEGATE_EXPR, operation_type,
1595 operand),
1596 modulus);
1597
1598 /* For the nonbinary negate case, return zero for a zero operand,
1599 else return the modulus minus the operand. If the modulus
1600 is a power of two minus one, we can do the subtraction
1601 as an XOR since it is equivalent and faster on most machines. */
1602 else if (op_code == NEGATE_EXPR && !mod_pow2)
1603 {
1604 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1605 modulus,
1606 convert (operation_type,
1607 integer_one_node))))
1608 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1609 operand, modulus);
1610 else
1611 result = fold_build2 (MINUS_EXPR, operation_type,
1612 modulus, operand);
1613
1614 result = fold_build3 (COND_EXPR, operation_type,
1615 fold_build2 (NE_EXPR,
1616 boolean_type_node,
1617 operand,
1618 convert
1619 (operation_type,
1620 integer_zero_node)),
1621 result, operand);
1622 }
1623 else
1624 {
1625 /* For the NOT cases, we need a constant equal to
1626 the modulus minus one. For a binary modulus, we
1627 XOR the operand against that constant; for a nonbinary
1628 modulus, we subtract the operand from it. */
1629
1630 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1631 convert (operation_type,
1632 integer_one_node));
1633
1634 if (mod_pow2)
1635 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1636 operand, cnst);
1637 else
1638 result = fold_build2 (MINUS_EXPR, operation_type,
1639 cnst, operand);
1640 }
1641
1642 break;
1643 }
1644 }
1645
1646 /* ... fall through ... */
1647
1648 default:
1649 gcc_assert (operation_type == base_type);
1650 result = fold_build1 (op_code, operation_type,
1651 convert (operation_type, operand));
1652 }
1653
1654 if (result_type && TREE_TYPE (result) != result_type)
1655 result = convert (result_type, result);
1656
1657 return result;
1658 }
1659 \f
1660 /* Similar, but for COND_EXPR. */
1661
1662 tree
1663 build_cond_expr (tree result_type, tree condition_operand,
1664 tree true_operand, tree false_operand)
1665 {
1666 bool addr_p = false;
1667 tree result;
1668
1669 /* The front-end verified that the result, true and false operands have
1670 the same base type. Convert everything to the result type. */
1671 true_operand = convert (result_type, true_operand);
1672 false_operand = convert (result_type, false_operand);
1673
1674 /* If the result type is unconstrained, take the address of the operands and
1675 then dereference the result. Likewise if the result type is passed by
1676 reference, because creating a temporary of this type is not allowed. */
1677 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1678 || TYPE_IS_BY_REFERENCE_P (result_type)
1679 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1680 {
1681 result_type = build_pointer_type (result_type);
1682 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1683 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1684 addr_p = true;
1685 }
1686
1687 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1688 true_operand, false_operand);
1689
1690 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1691 in both arms, make sure it gets evaluated by moving it ahead of the
1692 conditional expression. This is necessary because it is evaluated
1693 in only one place at run time and would otherwise be uninitialized
1694 in one of the arms. */
1695 true_operand = skip_simple_arithmetic (true_operand);
1696 false_operand = skip_simple_arithmetic (false_operand);
1697
1698 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1699 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1700
1701 if (addr_p)
1702 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1703
1704 return result;
1705 }
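
/* Editor's note: the following is an illustrative sketch only, not part of
   utils2.c.  It mirrors in plain C the trick used by build_cond_expr above
   for types that cannot live in a temporary: the conditional selects between
   the operands' addresses and the caller works through the resulting pointer.
   The names big_t and select_big are hypothetical.  */
#if 0
struct big_t;   /* stands for a by-reference or unconstrained type */

static const struct big_t *
select_big (int cond, const struct big_t *true_op, const struct big_t *false_op)
{
  /* No temporary of struct big_t is created; only addresses flow through the
     conditional, matching the ADDR_EXPR/INDIRECT_REF wrapping done above.  */
  return cond ? true_op : false_op;
}
#endif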
1706
1707 /* Similar, but for COMPOUND_EXPR. */
1708
1709 tree
1710 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1711 {
1712 bool addr_p = false;
1713 tree result;
1714
1715 /* If the result type is unconstrained, take the address of the operand and
1716 then dereference the result. Likewise if the result type is passed by
1717 reference, but this is natively handled in the gimplifier. */
1718 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1719 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1720 {
1721 result_type = build_pointer_type (result_type);
1722 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1723 addr_p = true;
1724 }
1725
1726 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1727 expr_operand);
1728
1729 if (addr_p)
1730 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1731
1732 return result;
1733 }
1734 \f
1735 /* Conveniently construct a function call expression. FNDECL names the
1736 function to be called, N is the number of arguments, and the "..."
1737 parameters are the argument expressions. Unlike build_call_expr
1738 this doesn't fold the call, hence it will always return a CALL_EXPR. */
1739
1740 tree
1741 build_call_n_expr (tree fndecl, int n, ...)
1742 {
1743 va_list ap;
1744 tree fntype = TREE_TYPE (fndecl);
1745 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1746
1747 va_start (ap, n);
1748 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1749 va_end (ap);
1750 return fn;
1751 }
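
/* Editor's note: illustrative sketch only, not part of utils2.c.  It shows
   with plain C types the same varargs-forwarding pattern that
   build_call_n_expr uses above (va_start on the count, then hand the va_list
   to a valist-consuming helper).  The names sum_valist and sum_n are
   hypothetical.  */
#if 0
#include <stdarg.h>

static int
sum_valist (int n, va_list ap)
{
  int total = 0;
  for (int i = 0; i < n; i++)
    total += va_arg (ap, int);   /* consume each forwarded argument */
  return total;
}

static int
sum_n (int n, ...)
{
  va_list ap;
  int total;

  va_start (ap, n);              /* N plays the same role as above */
  total = sum_valist (n, ap);    /* forward AP, as AP is forwarded to
                                    build_call_valist above */
  va_end (ap);
  return total;
}
#endif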
1752 \f
1753 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1754 pointed to by FILENAME, LINE and COL. Fall back to the current location
1755 if GNAT_NODE is absent or has no SLOC. */
1756
1757 static void
1758 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1759 {
1760 const char *str;
1761 int line_number, column_number;
1762
1763 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1764 {
1765 str = "";
1766 line_number = 0;
1767 column_number = 0;
1768 }
1769 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1770 {
1771 str = Get_Name_String
1772 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1773 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1774 column_number = Get_Column_Number (Sloc (gnat_node));
1775 }
1776 else
1777 {
1778 str = lbasename (LOCATION_FILE (input_location));
1779 line_number = LOCATION_LINE (input_location);
1780 column_number = LOCATION_COLUMN (input_location);
1781 }
1782
1783 const int len = strlen (str);
1784 *filename = build_string (len, str);
1785 TREE_TYPE (*filename) = build_array_type (unsigned_char_type_node,
1786 build_index_type (size_int (len)));
1787 *line = build_int_cst (NULL_TREE, line_number);
1788 if (col)
1789 *col = build_int_cst (NULL_TREE, column_number);
1790 }
1791
1792 /* Build a call to a function that raises an exception and passes file name
1793 and line number, if requested. MSG says which exception function to call.
1794 GNAT_NODE is the node conveying the source location for which the error
1795 should be signaled, or Empty in which case the error is signaled for the
1796 current location. KIND says which kind of exception node this is for,
1797 among N_Raise_{Constraint,Storage,Program}_Error. */
1798
1799 tree
1800 build_call_raise (int msg, Node_Id gnat_node, char kind)
1801 {
1802 tree fndecl = gnat_raise_decls[msg];
1803 tree label = get_exception_label (kind);
1804 tree filename, line;
1805
1806 /* If this is to be done as a goto, handle that case. */
1807 if (label)
1808 {
1809 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1810 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1811
1812 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1813 if (Present (local_raise))
1814 {
1815 tree gnu_local_raise
1816 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1817 tree gnu_exception_entity
1818 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1819 tree gnu_call
1820 = build_call_n_expr (gnu_local_raise, 1,
1821 build_unary_op (ADDR_EXPR, NULL_TREE,
1822 gnu_exception_entity));
1823 gnu_result
1824 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1825 }
1826
1827 return gnu_result;
1828 }
1829
1830 expand_sloc (gnat_node, &filename, &line, NULL);
1831
1832 return
1833 build_call_n_expr (fndecl, 2,
1834 build1 (ADDR_EXPR,
1835 build_pointer_type (unsigned_char_type_node),
1836 filename),
1837 line);
1838 }
1839
1840 /* Similar to build_call_raise, with extra information about the column
1841 where the check failed. */
1842
1843 tree
1844 build_call_raise_column (int msg, Node_Id gnat_node)
1845 {
1846 tree fndecl = gnat_raise_decls_ext[msg];
1847 tree filename, line, col;
1848
1849 expand_sloc (gnat_node, &filename, &line, &col);
1850
1851 return
1852 build_call_n_expr (fndecl, 3,
1853 build1 (ADDR_EXPR,
1854 build_pointer_type (unsigned_char_type_node),
1855 filename),
1856 line, col);
1857 }
1858
1859 /* Similar to build_call_raise_column, for an index or range check exception,
1860 with extra information of the form "INDEX out of range FIRST..LAST". */
1861
1862 tree
1863 build_call_raise_range (int msg, Node_Id gnat_node,
1864 tree index, tree first, tree last)
1865 {
1866 tree fndecl = gnat_raise_decls_ext[msg];
1867 tree filename, line, col;
1868
1869 expand_sloc (gnat_node, &filename, &line, &col);
1870
1871 return
1872 build_call_n_expr (fndecl, 6,
1873 build1 (ADDR_EXPR,
1874 build_pointer_type (unsigned_char_type_node),
1875 filename),
1876 line, col,
1877 convert (integer_type_node, index),
1878 convert (integer_type_node, first),
1879 convert (integer_type_node, last));
1880 }
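
/* Editor's note: illustrative sketch only, not part of utils2.c.  The runtime
   routine reached through gnat_raise_decls_ext reports extra information of
   the shape "INDEX out of range FIRST..LAST"; a plain-C analogue of that
   reporting, with the hypothetical name report_range_error, would be: */
#if 0
#include <stdio.h>

static void
report_range_error (const char *file, int line, int col,
                    int index, int first, int last)
{
  fprintf (stderr, "%s:%d:%d: %d out of range %d..%d\n",
           file, line, col, index, first, last);
}
#endif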
1881 \f
1882 /* qsort comparator for the bit positions of two constructor elements
1883 for record components. */
1884
1885 static int
1886 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1887 {
1888 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1889 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1890 const_tree const field1 = elmt1->index;
1891 const_tree const field2 = elmt2->index;
1892 const int ret
1893 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1894
1895 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1896 }
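
/* Editor's note: illustrative sketch only, not part of utils2.c.  The
   comparator above orders by bit position and breaks ties on DECL_UID so the
   ordering is total; the same shape with plain C data, using the hypothetical
   names elem and by_pos_then_uid, is: */
#if 0
struct elem { long pos; unsigned int uid; };

static int
by_pos_then_uid (const void *a, const void *b)
{
  const struct elem *x = (const struct elem *) a;
  const struct elem *y = (const struct elem *) b;

  if (x->pos != y->pos)
    return x->pos < y->pos ? -1 : 1;            /* primary key: position */
  return (x->uid > y->uid) - (x->uid < y->uid); /* tie-breaker, like DECL_UID */
}
#endif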
1897
1898 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1899
1900 tree
1901 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1902 {
1903 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1904 bool read_only = true;
1905 bool side_effects = false;
1906 tree result, obj, val;
1907 unsigned int n_elmts;
1908
1909 /* Scan the elements to see if they are all constant or if any has side
1910 effects, to let us set global flags on the resulting constructor. Count
1911 the elements along the way for possible sorting purposes below. */
1912 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1913 {
1914 /* The predicate must be in keeping with output_constructor. */
1915 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1916 || (TREE_CODE (type) == RECORD_TYPE
1917 && CONSTRUCTOR_BITFIELD_P (obj)
1918 && !initializer_constant_valid_for_bitfield_p (val))
1919 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1920 allconstant = false;
1921
1922 if (!TREE_READONLY (val))
1923 read_only = false;
1924
1925 if (TREE_SIDE_EFFECTS (val))
1926 side_effects = true;
1927 }
1928
1929 /* For record types with constant components only, sort the field list
1930 by increasing bit position. This is necessary to ensure the
1931 constructor can be output as static data. */
1932 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1933 v->qsort (compare_elmt_bitpos);
1934
1935 result = build_constructor (type, v);
1936 CONSTRUCTOR_NO_CLEARING (result) = 1;
1937 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1938 TREE_SIDE_EFFECTS (result) = side_effects;
1939 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1940 return result;
1941 }
1942 \f
1943 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1944 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1945 for the field. Don't fold the result if NO_FOLD_P is true.
1946
1947 We also handle the fact that we might have been passed a pointer to the
1948 actual record and know how to look for fields in variant parts. */
1949
1950 tree
1951 build_simple_component_ref (tree record_variable, tree component, tree field,
1952 bool no_fold_p)
1953 {
1954 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1955 tree base, ref;
1956
1957 gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
1958 && COMPLETE_TYPE_P (record_type)
1959 && (component == NULL_TREE) != (field == NULL_TREE));
1960
1961 /* If no field was specified, look for a field with the specified name in
1962 the current record only. */
1963 if (!field)
1964 for (field = TYPE_FIELDS (record_type);
1965 field;
1966 field = DECL_CHAIN (field))
1967 if (DECL_NAME (field) == component)
1968 break;
1969
1970 if (!field)
1971 return NULL_TREE;
1972
1973 /* If this field is not in the specified record, see if we can find a field
1974 in the specified record whose original field is the same as this one. */
1975 if (DECL_CONTEXT (field) != record_type)
1976 {
1977 tree new_field;
1978
1979 /* First loop through normal components. */
1980 for (new_field = TYPE_FIELDS (record_type);
1981 new_field;
1982 new_field = DECL_CHAIN (new_field))
1983 if (SAME_FIELD_P (field, new_field))
1984 break;
1985
1986 /* Next, see if we're looking for an inherited component in an extension.
1987 If so, look through the extension directly, unless the type contains
1988 a placeholder, as it might be needed for a later substitution. */
1989 if (!new_field
1990 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1991 && TYPE_ALIGN_OK (record_type)
1992 && !type_contains_placeholder_p (record_type)
1993 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1994 == RECORD_TYPE
1995 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1996 {
1997 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1998 NULL_TREE, field, no_fold_p);
1999 if (ref)
2000 return ref;
2001 }
2002
2003 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2004 component in the first search. Doing this search in two steps is
2005 required to avoid hidden homonymous fields in the _Parent field. */
2006 if (!new_field)
2007 for (new_field = TYPE_FIELDS (record_type);
2008 new_field;
2009 new_field = DECL_CHAIN (new_field))
2010 if (DECL_INTERNAL_P (new_field))
2011 {
2012 tree field_ref
2013 = build_simple_component_ref (record_variable,
2014 NULL_TREE, new_field, no_fold_p);
2015 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
2016 no_fold_p);
2017 if (ref)
2018 return ref;
2019 }
2020
2021 field = new_field;
2022 }
2023
2024 if (!field)
2025 return NULL_TREE;
2026
2027 /* If the field's offset has overflowed, do not try to access it, as doing
2028 so may trigger sanity checks deeper in the back-end. Note that we don't
2029 need to warn here since this is already done when the object is declared. */
2030 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2031 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2032 return NULL_TREE;
2033
2034 /* We have found a suitable field. Before building the COMPONENT_REF, get
2035 the base object of the record variable if possible. */
2036 base = record_variable;
2037
2038 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR)
2039 {
2040 tree inner_variable = TREE_OPERAND (record_variable, 0);
2041 tree inner_type = TYPE_MAIN_VARIANT (TREE_TYPE (inner_variable));
2042
2043 /* Look through a conversion between type variants. This is transparent
2044 as far as the field is concerned. */
2045 if (inner_type == record_type)
2046 base = inner_variable;
2047
2048 /* Look through a conversion between original and packable version, but
2049 the field needs to be adjusted in this case. */
2050 else if (RECORD_OR_UNION_TYPE_P (inner_type)
2051 && TYPE_NAME (inner_type) == TYPE_NAME (record_type))
2052 {
2053 tree new_field;
2054
2055 for (new_field = TYPE_FIELDS (inner_type);
2056 new_field;
2057 new_field = DECL_CHAIN (new_field))
2058 if (SAME_FIELD_P (field, new_field))
2059 break;
2060 if (new_field)
2061 {
2062 field = new_field;
2063 base = inner_variable;
2064 }
2065 }
2066 }
2067
2068 ref = build3 (COMPONENT_REF, TREE_TYPE (field), base, field, NULL_TREE);
2069
2070 if (TREE_READONLY (record_variable)
2071 || TREE_READONLY (field)
2072 || TYPE_READONLY (record_type))
2073 TREE_READONLY (ref) = 1;
2074
2075 if (TREE_THIS_VOLATILE (record_variable)
2076 || TREE_THIS_VOLATILE (field)
2077 || TYPE_VOLATILE (record_type))
2078 TREE_THIS_VOLATILE (ref) = 1;
2079
2080 if (no_fold_p)
2081 return ref;
2082
2083 /* The generic folder may punt in this case because the inner array type
2084 can be self-referential, but folding is in fact not problematic. */
2085 if (TREE_CODE (base) == CONSTRUCTOR
2086 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (base)))
2087 {
2088 unsigned int len = CONSTRUCTOR_NELTS (base);
2089 gcc_assert (len > 0);
2090
2091 if (field == CONSTRUCTOR_ELT (base, 0)->index)
2092 return CONSTRUCTOR_ELT (base, 0)->value;
2093
2094 if (len > 1)
2095 {
2096 if (field == CONSTRUCTOR_ELT (base, 1)->index)
2097 return CONSTRUCTOR_ELT (base, 1)->value;
2098 }
2099 else
2100 return NULL_TREE;
2101
2102 return ref;
2103 }
2104
2105 return fold (ref);
2106 }
2107
2108 /* Likewise, but generate a Constraint_Error if the reference could not be
2109 found. */
2110
2111 tree
2112 build_component_ref (tree record_variable, tree component, tree field,
2113 bool no_fold_p)
2114 {
2115 tree ref = build_simple_component_ref (record_variable, component, field,
2116 no_fold_p);
2117 if (ref)
2118 return ref;
2119
2120 /* If FIELD was specified, assume this is an invalid user field so raise
2121 Constraint_Error. Otherwise, we have no type to return so abort. */
2122 gcc_assert (field);
2123 return build1 (NULL_EXPR, TREE_TYPE (field),
2124 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2125 N_Raise_Constraint_Error));
2126 }
2127 \f
2128 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2129 identically. Process the case where a GNAT_PROC to call is provided. */
2130
2131 static inline tree
2132 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2133 Entity_Id gnat_proc, Entity_Id gnat_pool)
2134 {
2135 tree gnu_proc = gnat_to_gnu (gnat_proc);
2136 tree gnu_call;
2137
2138 /* A storage pool's underlying type is a record type (for both predefined
2139 storage pools and GNAT simple storage pools). The secondary stack uses
2140 the same mechanism, but its pool object (SS_Pool) is an integer. */
2141 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2142 {
2143 /* The size is the third parameter; the alignment parameter
2144 has the same type as the size. */
2145 Entity_Id gnat_size_type
2146 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2147 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2148
2149 tree gnu_pool = gnat_to_gnu (gnat_pool);
2150 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2151 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2152
2153 gnu_size = convert (gnu_size_type, gnu_size);
2154 gnu_align = convert (gnu_size_type, gnu_align);
2155
2156 /* The first arg is always the address of the storage pool; for a
2157 deallocator, the address of the object comes next; then come the
2158 size and alignment. */
2159 if (gnu_obj)
2160 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2161 gnu_size, gnu_align);
2162 else
2163 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2164 gnu_size, gnu_align);
2165 }
2166
2167 /* Secondary stack case. */
2168 else
2169 {
2170 /* The size is the second parameter. */
2171 Entity_Id gnat_size_type
2172 = Etype (Next_Formal (First_Formal (gnat_proc)));
2173 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2174
2175 gnu_size = convert (gnu_size_type, gnu_size);
2176
2177 /* For a deallocator, the first arg is the address of the object,
2178 followed by the size; for an allocator, only the size is passed. */
2179 if (gnu_obj)
2180 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2181 else
2182 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2183 }
2184
2185 return gnu_call;
2186 }
2187
2188 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2189 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2190 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2191 latter offers. */
2192
2193 static inline tree
2194 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2195 {
2196 /* When the DATA_TYPE alignment is stricter than what malloc offers
2197 (super-aligned case), we allocate an "aligning" wrapper type and return
2198 the address of its single data field with the malloc's return value
2199 stored just in front. */
2200
2201 unsigned int data_align = TYPE_ALIGN (data_type);
2202 unsigned int system_allocator_alignment
2203 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2204
2205 tree aligning_type
2206 = ((data_align > system_allocator_alignment)
2207 ? make_aligning_type (data_type, data_align, data_size,
2208 system_allocator_alignment,
2209 POINTER_SIZE / BITS_PER_UNIT,
2210 gnat_node)
2211 : NULL_TREE);
2212
2213 tree size_to_malloc
2214 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2215
2216 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2217
2218 if (aligning_type)
2219 {
2220 /* Latch malloc's return value and get a pointer to the aligning field
2221 first. */
2222 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2223
2224 tree aligning_record_addr
2225 = convert (build_pointer_type (aligning_type), storage_ptr);
2226
2227 tree aligning_record
2228 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2229
2230 tree aligning_field
2231 = build_component_ref (aligning_record, NULL_TREE,
2232 TYPE_FIELDS (aligning_type), false);
2233
2234 tree aligning_field_addr
2235 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2236
2237 /* Then arrange to store the allocator's return value ahead
2238 and return. */
2239 tree storage_ptr_slot_addr
2240 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2241 convert (ptr_type_node, aligning_field_addr),
2242 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2243 / BITS_PER_UNIT));
2244
2245 tree storage_ptr_slot
2246 = build_unary_op (INDIRECT_REF, NULL_TREE,
2247 convert (build_pointer_type (ptr_type_node),
2248 storage_ptr_slot_addr));
2249
2250 return
2251 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2252 build_binary_op (INIT_EXPR, NULL_TREE,
2253 storage_ptr_slot, storage_ptr),
2254 aligning_field_addr);
2255 }
2256 else
2257 return malloc_ptr;
2258 }
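
/* Editor's note: illustrative sketch only, not part of utils2.c.  It shows
   with plain pointer arithmetic the same idea as maybe_wrap_malloc above:
   over-allocate, hand out an address aligned beyond what malloc guarantees,
   and stash malloc's return value just in front of it for the matching free
   wrapper.  The name xmalloc_aligned is hypothetical; ALIGN must be a power
   of two and error handling is minimal.  */
#if 0
#include <stdint.h>
#include <stdlib.h>

static void *
xmalloc_aligned (size_t size, size_t align)
{
  /* Room for the data, the worst-case alignment padding and one
     pointer-sized slot in front of the block we hand out.  */
  void *raw = malloc (size + align + sizeof (void *));
  uintptr_t base, aligned;

  if (!raw)
    return NULL;

  base = (uintptr_t) raw + sizeof (void *);
  aligned = (base + align - 1) & ~(uintptr_t) (align - 1);

  /* Latch the allocator's return value just in front of the aligned block,
     where the companion free wrapper will look for it.  */
  ((void **) aligned)[-1] = raw;
  return (void *) aligned;
}
#endif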
2259
2260 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2261 designated by DATA_PTR using the __gnat_free entry point. */
2262
2263 static inline tree
2264 maybe_wrap_free (tree data_ptr, tree data_type)
2265 {
2266 /* In the regular alignment case, we pass the data pointer straight to free.
2267 In the super-aligned case, we need to retrieve the initial allocator
2268 return value, stored in front of the data block at allocation time. */
2269
2270 unsigned int data_align = TYPE_ALIGN (data_type);
2271 unsigned int system_allocator_alignment
2272 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2273
2274 tree free_ptr;
2275
2276 if (data_align > system_allocator_alignment)
2277 {
2278 /* DATA_FRONT_PTR (void *)
2279 = (void *)DATA_PTR - (void *)sizeof (void *)) */
2280 tree data_front_ptr
2281 = build_binary_op
2282 (POINTER_PLUS_EXPR, ptr_type_node,
2283 convert (ptr_type_node, data_ptr),
2284 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2285
2286 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2287 free_ptr
2288 = build_unary_op
2289 (INDIRECT_REF, NULL_TREE,
2290 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2291 }
2292 else
2293 free_ptr = data_ptr;
2294
2295 return build_call_n_expr (free_decl, 1, free_ptr);
2296 }
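
/* Editor's note: companion sketch to the one after maybe_wrap_malloc, again
   illustrative only and not part of utils2.c: the original malloc pointer is
   recovered from the slot just in front of the aligned block and handed back
   to free.  The name xfree_aligned is hypothetical.  */
#if 0
#include <stdlib.h>

static void
xfree_aligned (void *aligned)
{
  if (aligned)
    free (((void **) aligned)[-1]);   /* recover the stashed malloc pointer */
}
#endif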
2297
2298 /* Build a GCC tree to call an allocation or deallocation function.
2299 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2300 generate an allocator.
2301
2302 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2303 object type, used to determine the to-be-honored address alignment.
2304 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2305 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2306 to provide an error location for restriction violation messages. */
2307
2308 tree
2309 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2310 Entity_Id gnat_proc, Entity_Id gnat_pool,
2311 Node_Id gnat_node)
2312 {
2313 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2314
2315 /* Explicit proc to call?  This one is assumed to deal with the type
2316 alignment constraints. */
2317 if (Present (gnat_proc))
2318 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2319 gnat_proc, gnat_pool);
2320
2321 /* Otherwise, object to "free" or "malloc" with possible special processing
2322 for alignments stricter than what the default allocator honors. */
2323 else if (gnu_obj)
2324 return maybe_wrap_free (gnu_obj, gnu_type);
2325 else
2326 {
2327 /* Assert that we no longer can be called with this special pool. */
2328 gcc_assert (gnat_pool != -1);
2329
2330 /* Check that we aren't violating the associated restriction. */
2331 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2332 {
2333 Check_No_Implicit_Heap_Alloc (gnat_node);
2334 if (Has_Task (Etype (gnat_node)))
2335 Check_No_Implicit_Task_Alloc (gnat_node);
2336 if (Has_Protected (Etype (gnat_node)))
2337 Check_No_Implicit_Protected_Alloc (gnat_node);
2338 }
2339 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2340 }
2341 }
2342 \f
2343 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2344 initial value is INIT, if INIT is nonzero. Convert the expression to
2345 RESULT_TYPE, which must be some pointer type, and return the result.
2346
2347 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2348 the storage pool to use. GNAT_NODE is used to provide an error
2349 location for restriction violation messages. If IGNORE_INIT_TYPE is
2350 true, ignore the type of INIT for the purpose of determining the size;
2351 this will cause the maximum size to be allocated if TYPE is of
2352 self-referential size. */
2353
2354 tree
2355 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2356 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2357 {
2358 tree size, storage, storage_deref, storage_init;
2359
2360 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2361 if (init && TREE_CODE (init) == NULL_EXPR)
2362 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2363
2364 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2365 else if (init && TREE_CODE (init) == COND_EXPR)
2366 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2367 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2368 gnat_proc, gnat_pool, gnat_node,
2369 ignore_init_type),
2370 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2371 gnat_proc, gnat_pool, gnat_node,
2372 ignore_init_type));
2373
2374 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2375 sizes of the object and its template. Allocate the whole thing and
2376 fill in the parts that are known. */
2377 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2378 {
2379 tree storage_type
2380 = build_unc_object_type_from_ptr (result_type, type,
2381 get_identifier ("ALLOC"), false);
2382 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2383 tree storage_ptr_type = build_pointer_type (storage_type);
2384
2385 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2386 init);
2387
2388 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2389 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2390 size = size_int (-1);
2391
2392 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2393 gnat_proc, gnat_pool, gnat_node);
2394 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2395 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2396 TREE_THIS_NOTRAP (storage_deref) = 1;
2397
2398 /* If there is an initializing expression, then make a constructor for
2399 the entire object including the bounds and copy it into the object.
2400 If there is no initializing expression, just set the bounds. */
2401 if (init)
2402 {
2403 vec<constructor_elt, va_gc> *v;
2404 vec_alloc (v, 2);
2405
2406 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2407 build_template (template_type, type, init));
2408 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2409 init);
2410 storage_init
2411 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2412 gnat_build_constructor (storage_type, v));
2413 }
2414 else
2415 storage_init
2416 = build_binary_op (INIT_EXPR, NULL_TREE,
2417 build_component_ref (storage_deref, NULL_TREE,
2418 TYPE_FIELDS (storage_type),
2419 false),
2420 build_template (template_type, type, NULL_TREE));
2421
2422 return build2 (COMPOUND_EXPR, result_type,
2423 storage_init, convert (result_type, storage));
2424 }
2425
2426 size = TYPE_SIZE_UNIT (type);
2427
2428 /* If we have an initializing expression, see if its size is simpler
2429 than the size from the type. */
2430 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2431 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2432 || CONTAINS_PLACEHOLDER_P (size)))
2433 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2434
2435 /* If the size is still self-referential, reference the initializing
2436 expression, if it is present. If not, this must have been a
2437 call to allocate a library-level object, in which case we use
2438 the maximum size. */
2439 if (CONTAINS_PLACEHOLDER_P (size))
2440 {
2441 if (!ignore_init_type && init)
2442 size = substitute_placeholder_in_expr (size, init);
2443 else
2444 size = max_size (size, true);
2445 }
2446
2447 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2448 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2449 size = size_int (-1);
2450
2451 storage = convert (result_type,
2452 build_call_alloc_dealloc (NULL_TREE, size, type,
2453 gnat_proc, gnat_pool,
2454 gnat_node));
2455
2456 /* If we have an initial value, protect the new address, assign the value
2457 and return the address with a COMPOUND_EXPR. */
2458 if (init)
2459 {
2460 storage = gnat_protect_expr (storage);
2461 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2462 TREE_THIS_NOTRAP (storage_deref) = 1;
2463 storage_init
2464 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2465 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2466 }
2467
2468 return storage;
2469 }
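
/* Editor's note: illustrative sketch only, not part of utils2.c.  For fat or
   thin pointer results, build_allocator above allocates the bounds template
   and the data in a single block and fills in both; a rough plain-C analogue,
   with the hypothetical names bounds and alloc_array and with alignment
   concerns glossed over, is: */
#if 0
#include <stdlib.h>

struct bounds { int first, last; };   /* stand-in for the template */

static void *
alloc_array (int first, int last, size_t elem_size)
{
  size_t n = last >= first ? (size_t) (last - first + 1) : 0;
  /* One allocation holds the bounds followed by the data.  */
  char *block = malloc (sizeof (struct bounds) + n * elem_size);

  if (!block)
    return NULL;
  ((struct bounds *) block)->first = first;
  ((struct bounds *) block)->last = last;
  return block + sizeof (struct bounds);   /* pointer to the data part */
}
#endif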
2470 \f
2471 /* Indicate that we need to take the address of T and that it therefore
2472 should not be allocated in a register. Returns true if successful. */
2473
2474 bool
2475 gnat_mark_addressable (tree t)
2476 {
2477 while (true)
2478 switch (TREE_CODE (t))
2479 {
2480 case ADDR_EXPR:
2481 case COMPONENT_REF:
2482 case ARRAY_REF:
2483 case ARRAY_RANGE_REF:
2484 case REALPART_EXPR:
2485 case IMAGPART_EXPR:
2486 case VIEW_CONVERT_EXPR:
2487 case NON_LVALUE_EXPR:
2488 CASE_CONVERT:
2489 t = TREE_OPERAND (t, 0);
2490 break;
2491
2492 case COMPOUND_EXPR:
2493 t = TREE_OPERAND (t, 1);
2494 break;
2495
2496 case CONSTRUCTOR:
2497 TREE_ADDRESSABLE (t) = 1;
2498 return true;
2499
2500 case VAR_DECL:
2501 case PARM_DECL:
2502 case RESULT_DECL:
2503 TREE_ADDRESSABLE (t) = 1;
2504 return true;
2505
2506 case FUNCTION_DECL:
2507 TREE_ADDRESSABLE (t) = 1;
2508 return true;
2509
2510 case CONST_DECL:
2511 return DECL_CONST_CORRESPONDING_VAR (t)
2512 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2513
2514 default:
2515 return true;
2516 }
2517 }
2518 \f
2519 /* Return true if EXP is a stable expression for the purpose of the functions
2520 below and, therefore, can be returned unmodified by them. We accept things
2521 that are actual constants or that have already been handled. */
2522
2523 static bool
2524 gnat_stable_expr_p (tree exp)
2525 {
2526 enum tree_code code = TREE_CODE (exp);
2527 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2528 }
2529
2530 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2531 but we know how to handle our own nodes. */
2532
2533 tree
2534 gnat_save_expr (tree exp)
2535 {
2536 tree type = TREE_TYPE (exp);
2537 enum tree_code code = TREE_CODE (exp);
2538
2539 if (gnat_stable_expr_p (exp))
2540 return exp;
2541
2542 if (code == UNCONSTRAINED_ARRAY_REF)
2543 {
2544 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2545 TREE_READONLY (t) = TYPE_READONLY (type);
2546 return t;
2547 }
2548
2549 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2550 This may be more efficient, but will also allow us to more easily find
2551 the match for the PLACEHOLDER_EXPR. */
2552 if (code == COMPONENT_REF
2553 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2554 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2555 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2556
2557 return save_expr (exp);
2558 }
2559
2560 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2561 is optimized under the assumption that EXP's value doesn't change before
2562 its subsequent reuse(s) except through its potential reevaluation. */
2563
2564 tree
2565 gnat_protect_expr (tree exp)
2566 {
2567 tree type = TREE_TYPE (exp);
2568 enum tree_code code = TREE_CODE (exp);
2569
2570 if (gnat_stable_expr_p (exp))
2571 return exp;
2572
2573 /* If EXP has no side effects, we theoretically don't need to do anything.
2574 However, we may be recursively passed more and more complex expressions
2575 involving checks which will be reused multiple times and eventually be
2576 unshared for gimplification; in order to avoid a complexity explosion
2577 at that point, we protect any expressions more complex than a simple
2578 arithmetic expression. */
2579 if (!TREE_SIDE_EFFECTS (exp))
2580 {
2581 tree inner = skip_simple_arithmetic (exp);
2582 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2583 return exp;
2584 }
2585
2586 /* If this is a conversion, protect what's inside the conversion. */
2587 if (code == NON_LVALUE_EXPR
2588 || CONVERT_EXPR_CODE_P (code)
2589 || code == VIEW_CONVERT_EXPR)
2590 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2591
2592 /* If we're indirectly referencing something, we only need to protect the
2593 address since the data itself can't change in these situations. */
2594 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2595 {
2596 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2597 TREE_READONLY (t) = TYPE_READONLY (type);
2598 return t;
2599 }
2600
2601 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2602 This may be more efficient, but will also allow us to more easily find
2603 the match for the PLACEHOLDER_EXPR. */
2604 if (code == COMPONENT_REF
2605 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2606 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2607 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2608
2609 /* If this is a fat pointer or something that can be placed in a register,
2610 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2611 returned via invisible reference in most ABIs so the temporary will
2612 directly be filled by the callee. */
2613 if (TYPE_IS_FAT_POINTER_P (type)
2614 || TYPE_MODE (type) != BLKmode
2615 || code == CALL_EXPR)
2616 return save_expr (exp);
2617
2618 /* Otherwise, take the address of the object, protect it and dereference it. */
2619 return
2620 build_unary_op (INDIRECT_REF, type,
2621 save_expr (build_unary_op (ADDR_EXPR,
2622 build_reference_type (type),
2623 exp)));
2624 }
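
/* Editor's note: illustrative sketch only, not part of utils2.c.  The point
   of gnat_protect_expr (and of SAVE_EXPR generally) is that an operand with
   side effects is evaluated once and its cached value reused; the same
   discipline in plain C, with the hypothetical names next_token and
   square_of_next_token, looks like: */
#if 0
extern int next_token (void);   /* has side effects, must run exactly once */

static int
square_of_next_token (void)
{
  int saved = next_token ();    /* "protect" the value before reusing it */
  return saved * saved;         /* both uses read the cached copy */
}
#endif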
2625
2626 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2627 argument to force evaluation of everything. */
2628
2629 static tree
2630 gnat_stabilize_reference_1 (tree e, void *data)
2631 {
2632 const bool force = *(bool *)data;
2633 enum tree_code code = TREE_CODE (e);
2634 tree type = TREE_TYPE (e);
2635 tree result;
2636
2637 if (gnat_stable_expr_p (e))
2638 return e;
2639
2640 switch (TREE_CODE_CLASS (code))
2641 {
2642 case tcc_exceptional:
2643 case tcc_declaration:
2644 case tcc_comparison:
2645 case tcc_expression:
2646 case tcc_reference:
2647 case tcc_vl_exp:
2648 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2649 fat pointer. This may be more efficient, but will also allow
2650 us to more easily find the match for the PLACEHOLDER_EXPR. */
2651 if (code == COMPONENT_REF
2652 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2653 result
2654 = build3 (code, type,
2655 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2656 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2657 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2658 so that it will only be evaluated once. */
2659 /* The tcc_reference and tcc_comparison classes could be handled as
2660 below, but it is generally faster to only evaluate them once. */
2661 else if (TREE_SIDE_EFFECTS (e) || force)
2662 return save_expr (e);
2663 else
2664 return e;
2665 break;
2666
2667 case tcc_binary:
2668 /* Recursively stabilize each operand. */
2669 result
2670 = build2 (code, type,
2671 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2672 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2673 break;
2674
2675 case tcc_unary:
2676 /* Recursively stabilize each operand. */
2677 result
2678 = build1 (code, type,
2679 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2680 break;
2681
2682 default:
2683 gcc_unreachable ();
2684 }
2685
2686 TREE_READONLY (result) = TREE_READONLY (e);
2687 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2688 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2689
2690 return result;
2691 }
2692
2693 /* This is equivalent to stabilize_reference in tree.c but we know how to
2694 handle our own nodes and we take extra arguments. FORCE says whether to
2695 force evaluation of everything in REF. INIT is set to the first arm of
2696 a COMPOUND_EXPR present in REF, if any. */
2697
2698 tree
2699 gnat_stabilize_reference (tree ref, bool force, tree *init)
2700 {
2701 return
2702 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2703 }
2704
2705 /* Rewrite reference REF and call FUNC on each expression within REF in the
2706 process. DATA is passed unmodified to FUNC. INIT is set to the first
2707 arm of a COMPOUND_EXPR present in REF, if any. */
2708
2709 tree
2710 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2711 {
2712 tree type = TREE_TYPE (ref);
2713 enum tree_code code = TREE_CODE (ref);
2714 tree result;
2715
2716 switch (code)
2717 {
2718 case CONST_DECL:
2719 case VAR_DECL:
2720 case PARM_DECL:
2721 case RESULT_DECL:
2722 /* No action is needed in this case. */
2723 return ref;
2724
2725 CASE_CONVERT:
2726 case FLOAT_EXPR:
2727 case FIX_TRUNC_EXPR:
2728 case VIEW_CONVERT_EXPR:
2729 result
2730 = build1 (code, type,
2731 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2732 init));
2733 break;
2734
2735 case INDIRECT_REF:
2736 case UNCONSTRAINED_ARRAY_REF:
2737 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2738 break;
2739
2740 case COMPONENT_REF:
2741 result = build3 (COMPONENT_REF, type,
2742 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2743 data, init),
2744 TREE_OPERAND (ref, 1), NULL_TREE);
2745 break;
2746
2747 case BIT_FIELD_REF:
2748 result = build3 (BIT_FIELD_REF, type,
2749 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2750 data, init),
2751 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2752 break;
2753
2754 case ARRAY_REF:
2755 case ARRAY_RANGE_REF:
2756 result
2757 = build4 (code, type,
2758 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2759 init),
2760 func (TREE_OPERAND (ref, 1), data),
2761 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
2762 break;
2763
2764 case COMPOUND_EXPR:
2765 gcc_assert (*init == NULL_TREE);
2766 *init = TREE_OPERAND (ref, 0);
2767 /* We expect only the pattern built in Call_to_gnu. */
2768 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2769 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2770 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2771 return TREE_OPERAND (ref, 1);
2772
2773 case CALL_EXPR:
2774 {
2775 /* This can only be an atomic load. */
2776 gcc_assert (call_is_atomic_load (ref));
2777
2778 /* An atomic load is an INDIRECT_REF of its first argument. */
2779 tree t = CALL_EXPR_ARG (ref, 0);
2780 if (TREE_CODE (t) == NOP_EXPR)
2781 t = TREE_OPERAND (t, 0);
2782 if (TREE_CODE (t) == ADDR_EXPR)
2783 t = build1 (ADDR_EXPR, TREE_TYPE (t),
2784 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2785 init));
2786 else
2787 t = func (t, data);
2788 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2789
2790 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2791 t, CALL_EXPR_ARG (ref, 1));
2792 }
2793 break;
2794
2795 case ERROR_MARK:
2796 return error_mark_node;
2797
2798 default:
2799 gcc_unreachable ();
2800 }
2801
2802 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2803 may not be sustained across some paths, such as the way via build1 for
2804 INDIRECT_REF. We reset those flags here in the general case, which is
2805 consistent with the GCC version of this routine.
2806
2807 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2808 paths introduce side-effects where there was none initially (e.g. if a
2809 SAVE_EXPR is built) and we also want to keep track of that. */
2810 TREE_READONLY (result) = TREE_READONLY (ref);
2811 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2812 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2813
2814 if (code == INDIRECT_REF
2815 || code == UNCONSTRAINED_ARRAY_REF
2816 || code == ARRAY_REF
2817 || code == ARRAY_RANGE_REF)
2818 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2819
2820 return result;
2821 }
2822
2823 /* This is equivalent to get_inner_reference in expr.c but it returns the
2824 ultimate containing object only if the reference (lvalue) is constant,
2825 i.e. if it doesn't depend on the context in which it is evaluated. */
2826
2827 tree
2828 get_inner_constant_reference (tree exp)
2829 {
2830 while (true)
2831 {
2832 switch (TREE_CODE (exp))
2833 {
2834 case BIT_FIELD_REF:
2835 break;
2836
2837 case COMPONENT_REF:
2838 if (TREE_OPERAND (exp, 2) != NULL_TREE)
2839 return NULL_TREE;
2840
2841 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2842 return NULL_TREE;
2843 break;
2844
2845 case ARRAY_REF:
2846 case ARRAY_RANGE_REF:
2847 {
2848 if (TREE_OPERAND (exp, 2) != NULL_TREE
2849 || TREE_OPERAND (exp, 3) != NULL_TREE)
2850 return NULL_TREE;
2851
2852 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2853 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2854 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2855 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2856 return NULL_TREE;
2857 }
2858 break;
2859
2860 case REALPART_EXPR:
2861 case IMAGPART_EXPR:
2862 case VIEW_CONVERT_EXPR:
2863 break;
2864
2865 default:
2866 goto done;
2867 }
2868
2869 exp = TREE_OPERAND (exp, 0);
2870 }
2871
2872 done:
2873 return exp;
2874 }
2875
2876 /* If EXPR is an expression that is invariant in the current function, in the
2877 sense that it can be evaluated anywhere in the function and any number of
2878 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2879
2880 tree
2881 gnat_invariant_expr (tree expr)
2882 {
2883 tree type = TREE_TYPE (expr), t;
2884
2885 expr = remove_conversions (expr, false);
2886
2887 while ((TREE_CODE (expr) == CONST_DECL
2888 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2889 && decl_function_context (expr) == current_function_decl
2890 && DECL_INITIAL (expr))
2891 {
2892 expr = DECL_INITIAL (expr);
2893 /* Look into CONSTRUCTORs built to initialize padded types. */
2894 if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2895 expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2896 expr = remove_conversions (expr, false);
2897 }
2898
2899 /* We are only interested in scalar types at the moment and, even if we may
2900 have gone through padding types in the above loop, we must be back to a
2901 scalar value at this point. */
2902 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2903 return NULL_TREE;
2904
2905 if (TREE_CONSTANT (expr))
2906 return fold_convert (type, expr);
2907
2908 t = expr;
2909
2910 while (true)
2911 {
2912 switch (TREE_CODE (t))
2913 {
2914 case COMPONENT_REF:
2915 if (TREE_OPERAND (t, 2) != NULL_TREE)
2916 return NULL_TREE;
2917 break;
2918
2919 case ARRAY_REF:
2920 case ARRAY_RANGE_REF:
2921 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2922 || TREE_OPERAND (t, 2) != NULL_TREE
2923 || TREE_OPERAND (t, 3) != NULL_TREE)
2924 return NULL_TREE;
2925 break;
2926
2927 case BIT_FIELD_REF:
2928 case VIEW_CONVERT_EXPR:
2929 case REALPART_EXPR:
2930 case IMAGPART_EXPR:
2931 break;
2932
2933 case INDIRECT_REF:
2934 if (!TREE_READONLY (t)
2935 || TREE_SIDE_EFFECTS (t)
2936 || !TREE_THIS_NOTRAP (t))
2937 return NULL_TREE;
2938 break;
2939
2940 default:
2941 goto object;
2942 }
2943
2944 t = TREE_OPERAND (t, 0);
2945 }
2946
2947 object:
2948 if (TREE_SIDE_EFFECTS (t))
2949 return NULL_TREE;
2950
2951 if (TREE_CODE (t) == CONST_DECL
2952 && (DECL_EXTERNAL (t)
2953 || decl_function_context (t) != current_function_decl))
2954 return fold_convert (type, expr);
2955
2956 if (!TREE_READONLY (t))
2957 return NULL_TREE;
2958
2959 if (TREE_CODE (t) == PARM_DECL)
2960 return fold_convert (type, expr);
2961
2962 if (TREE_CODE (t) == VAR_DECL
2963 && (DECL_EXTERNAL (t)
2964 || decl_function_context (t) != current_function_decl))
2965 return fold_convert (type, expr);
2966
2967 return NULL_TREE;
2968 }