/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2014, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
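
/* A hypothetical illustration (not part of the compiler): for an Ada
   declaration such as

     subtype Score is Integer range 0 .. 100;

   gigi gives the INTEGER_TYPE built for Score a TREE_TYPE that points to
   the type of Integer, so get_base_type applied to Score's type walks up
   that chain and returns the type of Integer.  */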
\f
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.   Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smallest one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;
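
      /* A worked example of the bit trick above (illustration only): for
         c = 12 (binary 1100), -c is ...110100 in two's complement, so
         c & -c = 4, the lowest set bit.  A constant address that is a
         multiple of 4 bytes is thus known to be aligned to
         4 * BITS_PER_UNIT = 32 bits on a byte-addressable target.  */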

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
\f
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead to here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* If this is for an assignment, we must avoid writing more than what the
     target can hold, and the case of tagged types is handled in
     build_binary_op; so we use the LHS type if it is known to be smaller,
     or if it is of constant size and the RHS type is not, whatever the
     modes.  We also force T1 in case of constant size equality, to minimize
     occurrences of view conversions on the LHS of an assignment, except
     when the LHS is a record type with a variant part and the RHS is not,
     in order to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !(TREE_CODE (t1) == RECORD_TYPE
                   && TREE_CODE (t2) == RECORD_TYPE
                   && get_variant_part (t1) != NULL_TREE
                   && get_variant_part (t2) == NULL_TREE))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
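
/* To summarize the cascade above (illustration only), the candidates are
   tried in this order:

     1. T1, if it has constant size and is no larger than T2 (with the
        variant-part exception);
     2. T1, if it has non-BLKmode;
     3. T2, if it has constant size;
     4. T2, if it has non-BLKmode;
     5. NULL_TREE otherwise, leaving the operands unconverted.  */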
\f
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree bt;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          bt = get_base_type (TREE_TYPE (ub1));

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, bt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, bt, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
          else
            this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length1, size_zero_node);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
          else
            this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length2, size_zero_node);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1);
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand has side-effects, they have to be evaluated before
     starting the comparison above, since the place where they would
     otherwise be evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
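
/* Schematically (illustration only), for a one-dimensional comparison the
   tree built above has the shape

     (A1_IS_NULL && A2_IS_NULL)
       || (LENGTH1 == LENGTH2 && A1 == A2)

   where the null tests come from the per-dimension length checks, so two
   empty arrays compare equal without their data ever being read.  */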

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = (*CONSTRUCTOR_ELTS (p1))[0].value;
  else
    p1_array = build_component_ref (p1, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = (*CONSTRUCTOR_ELTS (p2))[0].value;
  else
    p2_array = build_component_ref (p2, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = (*CONSTRUCTOR_ELTS (p1))[1].value;
  else
    p1_bounds
      = build_component_ref (p1, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = (*CONSTRUCTOR_ELTS (p2))[1].value;
  else
    p2_bounds
      = build_component_ref (p2, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}
\f
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
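
/* A worked example (illustration only) for an Ada type declared as
   "type M7 is mod 7": computing 5 + 4 first rewrites the addition of the
   constant as a subtraction, 5 - (7 - 4) = 5 - 3 = 2, in a signed type one
   bit wider; the result is nonnegative, so no fixup is applied, and 2 is
   indeed (5 + 4) mod 7.  Had the subtraction gone negative, the modulus
   would have been added back by the COND_EXPR above.  */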
\f
/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
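
/* Concretely (illustration only), a load of a 4-byte atomic object is
   emitted as a call to the middle-end builtin that underlies

     __atomic_load_4 (addr, __ATOMIC_SEQ_CST);

   and a store as the __atomic_store_4 counterpart, the SEQ_CST memory
   model providing the full barriers described above.  */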

/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  */

tree
build_atomic_load (tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_src = src;
  tree t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
  return convert (TREE_TYPE (orig_src), t);
}
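
/* To make the builtin selection above concrete (illustration only, assuming
   the usual ordering of the codes): BUILT_IN_ATOMIC_LOAD_{N,1,2,4,8,16} are
   consecutive in the built_in_function enumeration, so for size == 4 the
   offset exact_log2 (4) + 1 == 3 picks BUILT_IN_ATOMIC_LOAD_4, and for
   size == 8 the offset 4 picks BUILT_IN_ATOMIC_LOAD_8.  */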

/* Build an atomic store from SRC to the underlying atomic object in DEST.  */

tree
build_atomic_store (tree dest, tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_dest = dest;
  tree t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
      && TYPE_SIZE (TREE_TYPE (dest))
         == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
    src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
  else
    src = convert (TREE_TYPE (dest), src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}
\f
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (result_type == NULL_TREE);
#endif
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_MAIN_VARIANT (left_type)
                       == TYPE_MAIN_VARIANT
                          (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        {
          /* We make an exception for a BLKmode type padding a non-BLKmode
             inner type and do the conversion of the LHS right away, since
             unchecked_convert wouldn't do it properly.  */
          if (TYPE_MODE (left_type) == BLKmode
              && TYPE_MODE (right_type) != BLKmode
              && TREE_CODE (right_operand) != CONSTRUCTOR)
            {
              operation_type = right_type;
              left_operand = convert (operation_type, left_operand);
              left_type = operation_type;
            }
          else
            operation_type = left_type;
        }

      /* If we have a call to a function that returns an unconstrained type
         with default discriminant on the RHS, use the RHS type (which is
         padded) as we cannot compute the size of the actual assignment.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && TYPE_IS_PADDING_P (right_type)
               && CONTAINS_PLACEHOLDER_P
                  (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         already of the correct type or the type involves a placeholder,
         since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversions between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential;
                 use it as the best type, preferring one with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */

      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      TREE_THIS_NOTRAP (result) = 1;
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
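
/* A hypothetical call (illustration only; the gnu_* names are made up for
   the example): a caller in gigi building an assignment might write

     gnu_stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_lhs, gnu_rhs);

   with RESULT_TYPE mandated to be NULL_TREE per the contract above, while
   an index computation would pass an explicit result type, e.g.

     gnu_idx = build_binary_op (PLUS_EXPR, gnu_index_type, gnu_a, gnu_b);  */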
\f
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in the
             general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             is the corresponding address, e.g. for an allocator.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = (*CONSTRUCTOR_ELTS (operand))[0].value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fallthru ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the field.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      {
        tree t = remove_conversions (operand, false);
        bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

        /* If TYPE is a thin pointer, either first retrieve the base if this
           is an expression with an offset built for the initialization of an
           object with an unconstrained nominal subtype, or else convert to
           the fat pointer.  */
        if (TYPE_IS_THIN_POINTER_P (type))
          {
            tree rec_type = TREE_TYPE (type);

            if (TREE_CODE (operand) == POINTER_PLUS_EXPR
                && TREE_OPERAND (operand, 1)
                   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
                && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
              {
                operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
                type = TREE_TYPE (operand);
              }
            else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
              {
                operand
                  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
                             operand);
                type = TREE_TYPE (operand);
              }
          }

        /* If we want to refer to an unconstrained array, use the appropriate
           expression.  But this will never survive down to the back-end.  */
        if (TYPE_IS_FAT_POINTER_P (type))
          {
            result = build1 (UNCONSTRAINED_ARRAY_REF,
                             TYPE_UNCONSTRAINED_ARRAY (type), operand);
            TREE_READONLY (result)
              = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
          }

        /* If we are dereferencing an ADDR_EXPR, return its operand.  */
        else if (TREE_CODE (operand) == ADDR_EXPR)
          result = TREE_OPERAND (operand, 0);

        /* Otherwise, build and fold the indirect reference.  */
        else
          {
            result = build_fold_indirect_ref (operand);
            TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
          }

        if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
          {
            TREE_SIDE_EFFECTS (result) = 1;
            if (TREE_CODE (result) == INDIRECT_REF)
              TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
          }

        if ((TREE_CODE (result) == INDIRECT_REF
             || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
            && can_never_be_null)
          TREE_THIS_NOTRAP (result) = 1;

        break;
      }

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */

        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand;
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most machines.  */
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   boolean_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to
                   the modulus minus one.  For a binary modulus, we
                   XOR the operand against this constant, and for a
                   nonbinary modulus we subtract the operand from it.  */

                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

    /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
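
/* Worked examples for the modular cases above (illustration only): with a
   nonbinary modulus of 10, "not X" is computed as 9 - X, so not 3 = 6;
   with a nonbinary modulus of 255 (a power of two minus one), "-X" is
   computed as X xor 255 guarded by the X /= 0 test, so -3 = 252, which is
   indeed (255 - 3) mod 255.  */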
\f
/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, because creating a temporary of this type is not allowed.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || TYPE_IS_BY_REFERENCE_P (result_type)
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

1596 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1597 in both arms, make sure it gets evaluated by moving it ahead of the
1598 conditional expression. This is necessary because it is evaluated
1599 in only one place at run time and would otherwise be uninitialized
1600 in one of the arms. */
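/* For illustration: C ? SAVE_EXPR <X> : SAVE_EXPR <X> + 1 becomes
(SAVE_EXPR <X>, C ? SAVE_EXPR <X> : SAVE_EXPR <X> + 1)
so that X is evaluated no matter which arm is taken. */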
1601 true_operand = skip_simple_arithmetic (true_operand);
1602 false_operand = skip_simple_arithmetic (false_operand);
1603
1604 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1605 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1606
1607 if (addr_p)
1608 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1609
1610 return result;
1611 }
1612
1613 /* Similar, but for COMPOUND_EXPR. */
1614
1615 tree
1616 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1617 {
1618 bool addr_p = false;
1619 tree result;
1620
1621 /* If the result type is unconstrained, take the address of the operand and
1622 then dereference the result. Likewise if the result type is passed by
1623 reference, but this is natively handled in the gimplifier. */
1624 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1625 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1626 {
1627 result_type = build_pointer_type (result_type);
1628 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1629 addr_p = true;
1630 }
1631
1632 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1633 expr_operand);
1634
1635 if (addr_p)
1636 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1637
1638 return result;
1639 }
1640 \f
1641 /* Conveniently construct a function call expression. FNDECL names the
1642 function to be called, N is the number of arguments, and the "..."
1643 parameters are the argument expressions. Unlike build_call_expr,
1644 this doesn't fold the call, so it always returns a CALL_EXPR. */
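/* For instance, a hypothetical invocation
build_call_n_expr (fndecl, 2, arg1, arg2)
yields a CALL_EXPR applying FNDECL to the two given arguments. */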
1645
1646 tree
1647 build_call_n_expr (tree fndecl, int n, ...)
1648 {
1649 va_list ap;
1650 tree fntype = TREE_TYPE (fndecl);
1651 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1652
1653 va_start (ap, n);
1654 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1655 va_end (ap);
1656 return fn;
1657 }
1658 \f
1659 /* Call a function that raises an exception and pass the line number and file
1660 name, if requested. MSG says which exception function to call.
1661
1662 GNAT_NODE is the gnat node conveying the source location for which the
1663 error should be signaled, or Empty in which case the error is signaled on
1664 the current ref_file_name/input_line.
1665
1666 KIND says which kind of exception this is for
1667 (N_Raise_{Constraint,Storage,Program}_Error). */
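/* For illustration, in the non-goto case the generated call has the
shape __gnat_rcheck_XX ("file_name", line), where the file name and
line number arguments are built below; XX here stands for whichever
runtime routine MSG selects. */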
1668
1669 tree
1670 build_call_raise (int msg, Node_Id gnat_node, char kind)
1671 {
1672 tree fndecl = gnat_raise_decls[msg];
1673 tree label = get_exception_label (kind);
1674 tree filename;
1675 int line_number;
1676 const char *str;
1677 int len;
1678
1679 /* If this is to be done as a goto, handle that case. */
1680 if (label)
1681 {
1682 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1683 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1684
1685 /* If Local_Raise is present, generate
1686 Local_Raise (exception'Identity); */
1687 if (Present (local_raise))
1688 {
1689 tree gnu_local_raise
1690 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1691 tree gnu_exception_entity
1692 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1693 tree gnu_call
1694 = build_call_n_expr (gnu_local_raise, 1,
1695 build_unary_op (ADDR_EXPR, NULL_TREE,
1696 gnu_exception_entity));
1697
1698 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1699 gnu_call, gnu_result);
}
1700
1701 return gnu_result;
1702 }
1703
1704 str
1705 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1706 ? ""
1707 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1708 ? IDENTIFIER_POINTER
1709 (get_identifier (Get_Name_String
1710 (Debug_Source_Name
1711 (Get_Source_File_Index (Sloc (gnat_node))))))
1712 : ref_filename;
1713
1714 len = strlen (str);
1715 filename = build_string (len, str);
1716 line_number
1717 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1718 ? Get_Logical_Line_Number (Sloc (gnat_node))
1719 : LOCATION_LINE (input_location);
1720
1721 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1722 build_index_type (size_int (len)));
1723
1724 return
1725 build_call_n_expr (fndecl, 2,
1726 build1 (ADDR_EXPR,
1727 build_pointer_type (unsigned_char_type_node),
1728 filename),
1729 build_int_cst (NULL_TREE, line_number));
1730 }
1731
1732 /* Similar to build_call_raise, for an index or range check exception as
1733 determined by MSG, with extra information generated of the form
1734 "INDEX out of range FIRST..LAST". */
1735
1736 tree
1737 build_call_raise_range (int msg, Node_Id gnat_node,
1738 tree index, tree first, tree last)
1739 {
1740 tree fndecl = gnat_raise_decls_ext[msg];
1741 tree filename;
1742 int line_number, column_number;
1743 const char *str;
1744 int len;
1745
1746 str
1747 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1748 ? ""
1749 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1750 ? IDENTIFIER_POINTER
1751 (get_identifier (Get_Name_String
1752 (Debug_Source_Name
1753 (Get_Source_File_Index (Sloc (gnat_node))))))
1754 : ref_filename;
1755
1756 len = strlen (str);
1757 filename = build_string (len, str);
1758 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1759 {
1760 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1761 column_number = Get_Column_Number (Sloc (gnat_node));
1762 }
1763 else
1764 {
1765 line_number = LOCATION_LINE (input_location);
1766 column_number = 0;
1767 }
1768
1769 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1770 build_index_type (size_int (len)));
1771
1772 return
1773 build_call_n_expr (fndecl, 6,
1774 build1 (ADDR_EXPR,
1775 build_pointer_type (unsigned_char_type_node),
1776 filename),
1777 build_int_cst (NULL_TREE, line_number),
1778 build_int_cst (NULL_TREE, column_number),
1779 convert (integer_type_node, index),
1780 convert (integer_type_node, first),
1781 convert (integer_type_node, last));
1782 }
1783
1784 /* Similar to build_call_raise, with extra information about the column
1785 where the check failed. */
1786
1787 tree
1788 build_call_raise_column (int msg, Node_Id gnat_node)
1789 {
1790 tree fndecl = gnat_raise_decls_ext[msg];
1791 tree filename;
1792 int line_number, column_number;
1793 const char *str;
1794 int len;
1795
1796 str
1797 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1798 ? ""
1799 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1800 ? IDENTIFIER_POINTER
1801 (get_identifier (Get_Name_String
1802 (Debug_Source_Name
1803 (Get_Source_File_Index (Sloc (gnat_node))))))
1804 : ref_filename;
1805
1806 len = strlen (str);
1807 filename = build_string (len, str);
1808 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1809 {
1810 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1811 column_number = Get_Column_Number (Sloc (gnat_node));
1812 }
1813 else
1814 {
1815 line_number = LOCATION_LINE (input_location);
1816 column_number = 0;
1817 }
1818
1819 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1820 build_index_type (size_int (len)));
1821
1822 return
1823 build_call_n_expr (fndecl, 3,
1824 build1 (ADDR_EXPR,
1825 build_pointer_type (unsigned_char_type_node),
1826 filename),
1827 build_int_cst (NULL_TREE, line_number),
1828 build_int_cst (NULL_TREE, column_number));
1829 }
1830 \f
1831 /* qsort comparator for the bit positions of two constructor elements
1832 for record components. */
1833
1834 static int
1835 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1836 {
1837 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1838 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1839 const_tree const field1 = elmt1->index;
1840 const_tree const field2 = elmt2->index;
1841 const int ret
1842 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1843
1844 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1845 }
1846
1847 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1848
1849 tree
1850 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1851 {
1852 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1853 bool read_only = true;
1854 bool side_effects = false;
1855 tree result, obj, val;
1856 unsigned int n_elmts;
1857
1858 /* Scan the elements to see if they are all constant or if any has side
1859 effects, to let us set global flags on the resulting constructor. Count
1860 the elements along the way for possible sorting purposes below. */
1861 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1862 {
1863 /* The predicate must be in keeping with output_constructor. */
1864 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1865 || (TREE_CODE (type) == RECORD_TYPE
1866 && CONSTRUCTOR_BITFIELD_P (obj)
1867 && !initializer_constant_valid_for_bitfield_p (val))
1868 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1869 allconstant = false;
1870
1871 if (!TREE_READONLY (val))
1872 read_only = false;
1873
1874 if (TREE_SIDE_EFFECTS (val))
1875 side_effects = true;
1876 }
1877
1878 /* For record types with constant components only, sort field list
1879 by increasing bit position. This is necessary to ensure the
1880 constructor can be output as static data. */
1881 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1882 v->qsort (compare_elmt_bitpos);
1883
1884 result = build_constructor (type, v);
1885 CONSTRUCTOR_NO_CLEARING (result) = 1;
1886 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1887 TREE_SIDE_EFFECTS (result) = side_effects;
1888 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1889 return result;
1890 }
1891 \f
1892 /* Return a COMPONENT_REF to access the field given either by COMPONENT,
1893 an IDENTIFIER_NODE naming the field, or by FIELD, a FIELD_DECL for it.
1894 Don't fold the result if NO_FOLD_P is true.
1895
1896 We also handle the fact that we might have been passed a pointer to the
1897 actual record and know how to look for fields in variant parts. */
1898
1899 static tree
1900 build_simple_component_ref (tree record_variable, tree component,
1901 tree field, bool no_fold_p)
1902 {
1903 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1904 tree ref, inner_variable;
1905
1906 gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
1907 && COMPLETE_TYPE_P (record_type)
1908 && (component == NULL_TREE) != (field == NULL_TREE));
1909
1910 /* If no field was specified, look for a field with the specified name in
1911 the current record only. */
1912 if (!field)
1913 for (field = TYPE_FIELDS (record_type);
1914 field;
1915 field = DECL_CHAIN (field))
1916 if (DECL_NAME (field) == component)
1917 break;
1918
1919 if (!field)
1920 return NULL_TREE;
1921
1922 /* If this field is not in the specified record, see if we can find a field
1923 in the specified record whose original field is the same as this one. */
1924 if (DECL_CONTEXT (field) != record_type)
1925 {
1926 tree new_field;
1927
1928 /* First loop through normal components. */
1929 for (new_field = TYPE_FIELDS (record_type);
1930 new_field;
1931 new_field = DECL_CHAIN (new_field))
1932 if (SAME_FIELD_P (field, new_field))
1933 break;
1934
1935 /* Next, see if we're looking for an inherited component in an extension.
1936 If so, look through the extension directly, but not if the type contains
1937 a placeholder, as it might be needed for a later substitution. */
1938 if (!new_field
1939 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1940 && TYPE_ALIGN_OK (record_type)
1941 && !type_contains_placeholder_p (record_type)
1942 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1943 == RECORD_TYPE
1944 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1945 {
1946 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1947 NULL_TREE, field, no_fold_p);
1948 if (ref)
1949 return ref;
1950 }
1951
1952 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
1953 component in the first search. Doing this search in two steps is
1954 required to avoid hidden homonymous fields in the _Parent field. */
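/* For illustration: an inherited component C is then reached as
RECORD_VARIABLE._Parent.C by the recursive calls below. */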
1955 if (!new_field)
1956 for (new_field = TYPE_FIELDS (record_type);
1957 new_field;
1958 new_field = DECL_CHAIN (new_field))
1959 if (DECL_INTERNAL_P (new_field))
1960 {
1961 tree field_ref
1962 = build_simple_component_ref (record_variable,
1963 NULL_TREE, new_field, no_fold_p);
1964 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1965 no_fold_p);
1966 if (ref)
1967 return ref;
1968 }
1969
1970 field = new_field;
1971 }
1972
1973 if (!field)
1974 return NULL_TREE;
1975
1976 /* If the field's offset has overflowed, do not try to access it, as doing
1977 so may trigger sanity checks deeper in the back-end. Note that we don't
1978 need to warn since this will be done on trying to declare the object. */
1979 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1980 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1981 return NULL_TREE;
1982
1983 /* Look through conversion between type variants. This is transparent as
1984 far as the field is concerned. */
1985 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1986 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1987 == record_type)
1988 inner_variable = TREE_OPERAND (record_variable, 0);
1989 else
1990 inner_variable = record_variable;
1991
1992 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
1993 NULL_TREE);
1994
1995 if (TREE_READONLY (record_variable)
1996 || TREE_READONLY (field)
1997 || TYPE_READONLY (record_type))
1998 TREE_READONLY (ref) = 1;
1999
2000 if (TREE_THIS_VOLATILE (record_variable)
2001 || TREE_THIS_VOLATILE (field)
2002 || TYPE_VOLATILE (record_type))
2003 TREE_THIS_VOLATILE (ref) = 1;
2004
2005 if (no_fold_p)
2006 return ref;
2007
2008 /* The generic folder may punt in this case because the inner array type
2009 can be self-referential, but folding is in fact not problematic. */
2010 if (TREE_CODE (record_variable) == CONSTRUCTOR
2011 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
2012 {
2013 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record_variable);
2014 unsigned HOST_WIDE_INT idx;
2015 tree index, value;
2016 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2017 if (index == field)
2018 return value;
2019 return ref;
2020 }
2021
2022 return fold (ref);
2023 }
2024 \f
2025 /* Like build_simple_component_ref, except that we give an error if the
2026 reference could not be found. */
2027
2028 tree
2029 build_component_ref (tree record_variable, tree component,
2030 tree field, bool no_fold_p)
2031 {
2032 tree ref = build_simple_component_ref (record_variable, component, field,
2033 no_fold_p);
2034
2035 if (ref)
2036 return ref;
2037
2038 /* If FIELD was specified, assume this is an invalid user field so raise
2039 Constraint_Error. Otherwise, we have no type to return so abort. */
2040 gcc_assert (field);
2041 return build1 (NULL_EXPR, TREE_TYPE (field),
2042 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2043 N_Raise_Constraint_Error));
2044 }
2045 \f
2046 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2047 identically. Process the case where a GNAT_PROC to call is provided. */
2048
2049 static inline tree
2050 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2051 Entity_Id gnat_proc, Entity_Id gnat_pool)
2052 {
2053 tree gnu_proc = gnat_to_gnu (gnat_proc);
2054 tree gnu_call;
2055
2056 /* A storage pool's underlying type is a record type (for both predefined
2057 storage pools and GNAT simple storage pools). The secondary stack uses
2058 the same mechanism, but its pool object (SS_Pool) is an integer. */
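/* For reference, a user-defined pool's Allocate has the Ada profile

procedure Allocate
(Pool : in out Root_Storage_Pool;
Storage_Address : out Address;
Size_In_Storage_Elements : in Storage_Count;
Alignment : in Storage_Count);

which is why the size is the third formal parameter below. */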
2059 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2060 {
2061 /* The size is the third parameter; the alignment parameter
2062 has the same type. */
2063 Entity_Id gnat_size_type
2064 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2065 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2066
2067 tree gnu_pool = gnat_to_gnu (gnat_pool);
2068 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2069 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2070
2071 gnu_size = convert (gnu_size_type, gnu_size);
2072 gnu_align = convert (gnu_size_type, gnu_align);
2073
2074 /* The first arg is always the address of the storage pool; next
2075 comes the address of the object, for a deallocator, then the
2076 size and alignment. */
2077 if (gnu_obj)
2078 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2079 gnu_size, gnu_align);
2080 else
2081 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2082 gnu_size, gnu_align);
2083 }
2084
2085 /* Secondary stack case. */
2086 else
2087 {
2088 /* The size is the second parameter. */
2089 Entity_Id gnat_size_type
2090 = Etype (Next_Formal (First_Formal (gnat_proc)));
2091 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2092
2093 gnu_size = convert (gnu_size_type, gnu_size);
2094
2095 /* For a deallocator, the first arg is the address of the object,
2096 then comes the size; for an allocator, there is only the size. */
2097 if (gnu_obj)
2098 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2099 else
2100 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2101 }
2102
2103 return gnu_call;
2104 }
2105
2106 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2107 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2108 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2109 latter offers. */
2110
2111 static inline tree
2112 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2113 {
2114 /* When the DATA_TYPE alignment is stricter than what malloc offers
2115 (super-aligned case), we allocate an "aligning" wrapper type and return
2116 the address of its single data field with the malloc's return value
2117 stored just in front. */
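/* A sketch of the super-aligned layout (illustrative only): the block
returned by malloc holds some padding, then the malloc pointer itself,
then the aligned data field whose address is returned; maybe_wrap_free
later reads the saved pointer back from just before the data. */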
2118
2119 unsigned int data_align = TYPE_ALIGN (data_type);
2120 unsigned int system_allocator_alignment
2121 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2122
2123 tree aligning_type
2124 = ((data_align > system_allocator_alignment)
2125 ? make_aligning_type (data_type, data_align, data_size,
2126 system_allocator_alignment,
2127 POINTER_SIZE / BITS_PER_UNIT,
2128 gnat_node)
2129 : NULL_TREE);
2130
2131 tree size_to_malloc
2132 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2133
2134 tree malloc_ptr;
2135
2136 /* On VMS, if pointers are 64-bit and the allocator size is 32-bit or
2137 Convention C, allocate 32-bit memory. */
2138 if (TARGET_ABI_OPEN_VMS
2139 && POINTER_SIZE == 64
2140 && Nkind (gnat_node) == N_Allocator
2141 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
2142 || Convention (Etype (gnat_node)) == Convention_C))
2143 malloc_ptr = build_call_n_expr (malloc32_decl, 1, size_to_malloc);
2144 else
2145 malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2146
2147 if (aligning_type)
2148 {
2149 /* Latch malloc's return value and get a pointer to the aligning field
2150 first. */
2151 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2152
2153 tree aligning_record_addr
2154 = convert (build_pointer_type (aligning_type), storage_ptr);
2155
2156 tree aligning_record
2157 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2158
2159 tree aligning_field
2160 = build_component_ref (aligning_record, NULL_TREE,
2161 TYPE_FIELDS (aligning_type), false);
2162
2163 tree aligning_field_addr
2164 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2165
2166 /* Then arrange to store the allocator's return value ahead
2167 and return. */
2168 tree storage_ptr_slot_addr
2169 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
2170 convert (ptr_void_type_node, aligning_field_addr),
2171 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2172 / BITS_PER_UNIT));
2173
2174 tree storage_ptr_slot
2175 = build_unary_op (INDIRECT_REF, NULL_TREE,
2176 convert (build_pointer_type (ptr_void_type_node),
2177 storage_ptr_slot_addr));
2178
2179 return
2180 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2181 build_binary_op (INIT_EXPR, NULL_TREE,
2182 storage_ptr_slot, storage_ptr),
2183 aligning_field_addr);
2184 }
2185 else
2186 return malloc_ptr;
2187 }
2188
2189 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2190 designated by DATA_PTR using the __gnat_free entry point. */
2191
2192 static inline tree
2193 maybe_wrap_free (tree data_ptr, tree data_type)
2194 {
2195 /* In the regular alignment case, we pass the data pointer straight to free.
2196 In the super-aligned case, we need to retrieve the initial allocator
2197 return value, stored in front of the data block at allocation time. */
2198
2199 unsigned int data_align = TYPE_ALIGN (data_type);
2200 unsigned int system_allocator_alignment
2201 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2202
2203 tree free_ptr;
2204
2205 if (data_align > system_allocator_alignment)
2206 {
2207 /* DATA_FRONT_PTR (void *)
2208 = (void *)DATA_PTR - sizeof (void *) */
2209 tree data_front_ptr
2210 = build_binary_op
2211 (POINTER_PLUS_EXPR, ptr_void_type_node,
2212 convert (ptr_void_type_node, data_ptr),
2213 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2214
2215 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2216 free_ptr
2217 = build_unary_op
2218 (INDIRECT_REF, NULL_TREE,
2219 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2220 }
2221 else
2222 free_ptr = data_ptr;
2223
2224 return build_call_n_expr (free_decl, 1, free_ptr);
2225 }
2226
2227 /* Build a GCC tree to call an allocation or deallocation function.
2228 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2229 generate an allocator.
2230
2231 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2232 object type, used to determine the to-be-honored address alignment.
2233 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2234 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2235 to provide an error location for restriction violation messages. */
2236
2237 tree
2238 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2239 Entity_Id gnat_proc, Entity_Id gnat_pool,
2240 Node_Id gnat_node)
2241 {
2242 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2243
2244 /* Explicit procedure to call? It is assumed to deal with the type
2245 alignment constraints. */
2246 if (Present (gnat_proc))
2247 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2248 gnat_proc, gnat_pool);
2249
2250 /* Otherwise, object to "free" or "malloc" with possible special processing
2251 for alignments stricter than what the default allocator honors. */
2252 else if (gnu_obj)
2253 return maybe_wrap_free (gnu_obj, gnu_type);
2254 else
2255 {
2256 /* Assert that we can no longer be called with this special pool. */
2257 gcc_assert (gnat_pool != -1);
2258
2259 /* Check that we aren't violating the associated restriction. */
2260 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2261 Check_No_Implicit_Heap_Alloc (gnat_node);
2262
2263 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2264 }
2265 }
2266 \f
2267 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2268 initial value is INIT, if INIT is nonzero. Convert the expression to
2269 RESULT_TYPE, which must be some pointer type, and return the result.
2270
2271 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2272 the storage pool to use. GNAT_NODE is used to provide an error
2273 location for restriction violation messages. If IGNORE_INIT_TYPE is
2274 true, ignore the type of INIT for the purpose of determining the size;
2275 this will cause the maximum size to be allocated if TYPE is of
2276 self-referential size. */
2277
2278 tree
2279 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2280 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2281 {
2282 tree size, storage, storage_deref, storage_init;
2283
2284 /* If there is an initializer and it is a NULL_EXPR, just return a new one. */
2285 if (init && TREE_CODE (init) == NULL_EXPR)
2286 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2287
2288 /* If there is an initializer and it is a COND_EXPR, deal with each branch. */
2289 else if (init && TREE_CODE (init) == COND_EXPR)
2290 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2291 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2292 gnat_proc, gnat_pool, gnat_node,
2293 ignore_init_type),
2294 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2295 gnat_proc, gnat_pool, gnat_node,
2296 ignore_init_type));
2297
2298 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2299 sizes of the object and its template. Allocate the whole thing and
2300 fill in the parts that are known. */
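/* For illustration, the storage type built below is conceptually

struct ALLOC { bounds template; array data; }

with the template first and the array data right behind it, so a
single allocation covers both. */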
2301 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2302 {
2303 tree storage_type
2304 = build_unc_object_type_from_ptr (result_type, type,
2305 get_identifier ("ALLOC"), false);
2306 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2307 tree storage_ptr_type = build_pointer_type (storage_type);
2308
2309 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2310 init);
2311
2312 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2313 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2314 size = size_int (-1);
2315
2316 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2317 gnat_proc, gnat_pool, gnat_node);
2318 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2319 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2320 TREE_THIS_NOTRAP (storage_deref) = 1;
2321
2322 /* If there is an initializing expression, then make a constructor for
2323 the entire object including the bounds and copy it into the object.
2324 If there is no initializing expression, just set the bounds. */
2325 if (init)
2326 {
2327 vec<constructor_elt, va_gc> *v;
2328 vec_alloc (v, 2);
2329
2330 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2331 build_template (template_type, type, init));
2332 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2333 init);
2334 storage_init
2335 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2336 gnat_build_constructor (storage_type, v));
2337 }
2338 else
2339 storage_init
2340 = build_binary_op (INIT_EXPR, NULL_TREE,
2341 build_component_ref (storage_deref, NULL_TREE,
2342 TYPE_FIELDS (storage_type),
2343 false),
2344 build_template (template_type, type, NULL_TREE));
2345
2346 return build2 (COMPOUND_EXPR, result_type,
2347 storage_init, convert (result_type, storage));
2348 }
2349
2350 size = TYPE_SIZE_UNIT (type);
2351
2352 /* If we have an initializing expression, see if its size is simpler
2353 than the size from the type. */
2354 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2355 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2356 || CONTAINS_PLACEHOLDER_P (size)))
2357 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2358
2359 /* If the size is still self-referential, reference the initializing
2360 expression, if it is present. If not, this must have been a
2361 call to allocate a library-level object, in which case we use
2362 the maximum size. */
2363 if (CONTAINS_PLACEHOLDER_P (size))
2364 {
2365 if (!ignore_init_type && init)
2366 size = substitute_placeholder_in_expr (size, init);
2367 else
2368 size = max_size (size, true);
2369 }
2370
2371 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2372 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2373 size = size_int (-1);
2374
2375 storage = convert (result_type,
2376 build_call_alloc_dealloc (NULL_TREE, size, type,
2377 gnat_proc, gnat_pool,
2378 gnat_node));
2379
2380 /* If we have an initial value, protect the new address, assign the value
2381 and return the address with a COMPOUND_EXPR. */
2382 if (init)
2383 {
2384 storage = gnat_protect_expr (storage);
2385 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2386 TREE_THIS_NOTRAP (storage_deref) = 1;
2387 storage_init
2388 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2389 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2390 }
2391
2392 return storage;
2393 }
2394 \f
2395 /* Indicate that we need to take the address of T and that it therefore
2396 should not be allocated in a register. Returns true if successful. */
2397
2398 bool
2399 gnat_mark_addressable (tree t)
2400 {
2401 while (true)
2402 switch (TREE_CODE (t))
2403 {
2404 case ADDR_EXPR:
2405 case COMPONENT_REF:
2406 case ARRAY_REF:
2407 case ARRAY_RANGE_REF:
2408 case REALPART_EXPR:
2409 case IMAGPART_EXPR:
2410 case VIEW_CONVERT_EXPR:
2411 case NON_LVALUE_EXPR:
2412 CASE_CONVERT:
2413 t = TREE_OPERAND (t, 0);
2414 break;
2415
2416 case COMPOUND_EXPR:
2417 t = TREE_OPERAND (t, 1);
2418 break;
2419
2420 case CONSTRUCTOR:
2421 TREE_ADDRESSABLE (t) = 1;
2422 return true;
2423
2424 case VAR_DECL:
2425 case PARM_DECL:
2426 case RESULT_DECL:
2427 TREE_ADDRESSABLE (t) = 1;
2428 return true;
2429
2430 case FUNCTION_DECL:
2431 TREE_ADDRESSABLE (t) = 1;
2432 return true;
2433
2434 case CONST_DECL:
2435 return DECL_CONST_CORRESPONDING_VAR (t)
2436 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2437
2438 default:
2439 return true;
2440 }
2441 }
2442 \f
2443 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2444 but we know how to handle our own nodes. */
2445
2446 tree
2447 gnat_save_expr (tree exp)
2448 {
2449 tree type = TREE_TYPE (exp);
2450 enum tree_code code = TREE_CODE (exp);
2451
2452 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2453 return exp;
2454
2455 if (code == UNCONSTRAINED_ARRAY_REF)
2456 {
2457 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2458 TREE_READONLY (t) = TYPE_READONLY (type);
2459 return t;
2460 }
2461
2462 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2463 This may be more efficient, but will also allow us to more easily find
2464 the match for the PLACEHOLDER_EXPR. */
2465 if (code == COMPONENT_REF
2466 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2467 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2468 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2469
2470 return save_expr (exp);
2471 }
2472
2473 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2474 is optimized under the assumption that EXP's value doesn't change before
2475 its subsequent reuse(s) except through its potential reevaluation. */
2476
2477 tree
2478 gnat_protect_expr (tree exp)
2479 {
2480 tree type = TREE_TYPE (exp);
2481 enum tree_code code = TREE_CODE (exp);
2482
2483 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2484 return exp;
2485
2486 /* If EXP has no side effects, we theoretically don't need to do anything.
2487 However, we may be recursively passed more and more complex expressions
2488 involving checks which will be reused multiple times and eventually be
2489 unshared for gimplification; in order to avoid a complexity explosion
2490 at that point, we protect any expressions more complex than a simple
2491 arithmetic expression. */
2492 if (!TREE_SIDE_EFFECTS (exp))
2493 {
2494 tree inner = skip_simple_arithmetic (exp);
2495 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2496 return exp;
2497 }
2498
2499 /* If this is a conversion, protect what's inside the conversion. */
2500 if (code == NON_LVALUE_EXPR
2501 || CONVERT_EXPR_CODE_P (code)
2502 || code == VIEW_CONVERT_EXPR)
2503 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2504
2505 /* If we're indirectly referencing something, we only need to protect the
2506 address since the data itself can't change in these situations. */
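/* For illustration: *P is protected as *SAVE_EXPR <P>, so the address
P is evaluated only once while the dereference stays in place. */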
2507 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2508 {
2509 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2510 TREE_READONLY (t) = TYPE_READONLY (type);
2511 return t;
2512 }
2513
2514 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2515 This may be more efficient, but will also allow us to more easily find
2516 the match for the PLACEHOLDER_EXPR. */
2517 if (code == COMPONENT_REF
2518 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2519 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2520 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2521
2522 /* If this is a fat pointer or something that can be placed in a register,
2523 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2524 returned via invisible reference in most ABIs so the temporary will
2525 directly be filled by the callee. */
2526 if (TYPE_IS_FAT_POINTER_P (type)
2527 || TYPE_MODE (type) != BLKmode
2528 || code == CALL_EXPR)
2529 return save_expr (exp);
2530
2531 /* Otherwise, take the address, protect it and dereference the result. */
2532 return
2533 build_unary_op (INDIRECT_REF, type,
2534 save_expr (build_unary_op (ADDR_EXPR,
2535 build_reference_type (type),
2536 exp)));
2537 }
2538
2539 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2540 argument to force evaluation of everything. */
2541
2542 static tree
2543 gnat_stabilize_reference_1 (tree e, bool force)
2544 {
2545 enum tree_code code = TREE_CODE (e);
2546 tree type = TREE_TYPE (e);
2547 tree result;
2548
2549 /* We cannot ignore const expressions because one might be a reference
2550 to a const array whose index contains side effects. But we can
2551 ignore things that are actual constants or that have already been
2552 handled by this function. */
2553 if (TREE_CONSTANT (e) || code == SAVE_EXPR)
2554 return e;
2555
2556 switch (TREE_CODE_CLASS (code))
2557 {
2558 case tcc_exceptional:
2559 case tcc_declaration:
2560 case tcc_comparison:
2561 case tcc_expression:
2562 case tcc_reference:
2563 case tcc_vl_exp:
2564 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2565 fat pointer. This may be more efficient, but will also allow
2566 us to more easily find the match for the PLACEHOLDER_EXPR. */
2567 if (code == COMPONENT_REF
2568 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2569 result
2570 = build3 (code, type,
2571 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2572 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2573 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2574 so that it will only be evaluated once. */
2575 /* The tcc_reference and tcc_comparison classes could be handled as
2576 below, but it is generally faster to only evaluate them once. */
2577 else if (TREE_SIDE_EFFECTS (e) || force)
2578 return save_expr (e);
2579 else
2580 return e;
2581 break;
2582
2583 case tcc_binary:
2584 /* Recursively stabilize each operand. */
2585 result
2586 = build2 (code, type,
2587 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2588 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
2589 break;
2590
2591 case tcc_unary:
2592 /* Recursively stabilize each operand. */
2593 result
2594 = build1 (code, type,
2595 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
2596 break;
2597
2598 default:
2599 gcc_unreachable ();
2600 }
2601
2602 /* See similar handling in gnat_stabilize_reference. */
2603 TREE_READONLY (result) = TREE_READONLY (e);
2604 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2605 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2606
2607 if (code == INDIRECT_REF
2608 || code == UNCONSTRAINED_ARRAY_REF
2609 || code == ARRAY_REF
2610 || code == ARRAY_RANGE_REF)
2611 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
2612
2613 return result;
2614 }
2615
2616 /* This is equivalent to stabilize_reference in tree.c but we know how to
2617 handle our own nodes and we take extra arguments. FORCE says whether to
2618 force evaluation of everything. We set SUCCESS to true unless we walk
2619 through something we don't know how to stabilize. */
2620
2621 tree
2622 gnat_stabilize_reference (tree ref, bool force, bool *success)
2623 {
2624 tree type = TREE_TYPE (ref);
2625 enum tree_code code = TREE_CODE (ref);
2626 tree result;
2627
2628 /* Assume we'll succeed unless proven otherwise. */
2629 if (success)
2630 *success = true;
2631
2632 switch (code)
2633 {
2634 case CONST_DECL:
2635 case VAR_DECL:
2636 case PARM_DECL:
2637 case RESULT_DECL:
2638 /* No action is needed in this case. */
2639 return ref;
2640
2641 case ADDR_EXPR:
2642 CASE_CONVERT:
2643 case FLOAT_EXPR:
2644 case FIX_TRUNC_EXPR:
2645 case VIEW_CONVERT_EXPR:
2646 result
2647 = build1 (code, type,
2648 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2649 success));
2650 break;
2651
2652 case INDIRECT_REF:
2653 case UNCONSTRAINED_ARRAY_REF:
2654 result = build1 (code, type,
2655 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
2656 force));
2657 break;
2658
2659 case COMPONENT_REF:
2660 result = build3 (COMPONENT_REF, type,
2661 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2662 success),
2663 TREE_OPERAND (ref, 1), NULL_TREE);
2664 break;
2665
2666 case BIT_FIELD_REF:
2667 result = build3 (BIT_FIELD_REF, type,
2668 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2669 success),
2670 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2671 break;
2672
2673 case ARRAY_REF:
2674 case ARRAY_RANGE_REF:
2675 result = build4 (code, type,
2676 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2677 success),
2678 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2679 force),
2680 NULL_TREE, NULL_TREE);
2681 break;
2682
2683 case CALL_EXPR:
2684 result = gnat_stabilize_reference_1 (ref, force);
2685 break;
2686
2687 case COMPOUND_EXPR:
2688 result = build2 (COMPOUND_EXPR, type,
2689 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2690 success),
2691 gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
2692 success));
2693 break;
2694
2695 case CONSTRUCTOR:
2696 /* Constructors with 1 element are used extensively to formally
2697 convert objects to special wrapping types. */
2698 if (TREE_CODE (type) == RECORD_TYPE
2699 && vec_safe_length (CONSTRUCTOR_ELTS (ref)) == 1)
2700 {
2701 tree index = (*CONSTRUCTOR_ELTS (ref))[0].index;
2702 tree value = (*CONSTRUCTOR_ELTS (ref))[0].value;
2703 result
2704 = build_constructor_single (type, index,
2705 gnat_stabilize_reference_1 (value,
2706 force));
2707 }
2708 else
2709 {
2710 if (success)
2711 *success = false;
2712 return ref;
2713 }
2714 break;
2715
2716 case ERROR_MARK:
2717 ref = error_mark_node;
2718
2719 /* ... fall through to failure ... */
2720
2721 /* If arg isn't a kind of lvalue we recognize, make no change.
2722 Caller should recognize the error for an invalid lvalue. */
2723 default:
2724 if (success)
2725 *success = false;
2726 return ref;
2727 }
2728
2729 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2730 may not be sustained across some paths, such as the path through build1
2731 for INDIRECT_REF. We reset those flags here in the general case, which is
2732 consistent with the GCC version of this routine.
2733
2734 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2735 paths introduce side-effects where there was none initially (e.g. if a
2736 SAVE_EXPR is built) and we also want to keep track of that. */
2737 TREE_READONLY (result) = TREE_READONLY (ref);
2738 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2739 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2740
2741 if (code == INDIRECT_REF
2742 || code == UNCONSTRAINED_ARRAY_REF
2743 || code == ARRAY_REF
2744 || code == ARRAY_RANGE_REF)
2745 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2746
2747 return result;
2748 }
2749
2750 /* If EXPR is an expression that is invariant in the current function, in the
2751 sense that it can be evaluated anywhere in the function and any number of
2752 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2753
2754 tree
2755 gnat_invariant_expr (tree expr)
2756 {
2757 tree type = TREE_TYPE (expr), t;
2758
2759 expr = remove_conversions (expr, false);
2760
2761 while ((TREE_CODE (expr) == CONST_DECL
2762 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2763 && decl_function_context (expr) == current_function_decl
2764 && DECL_INITIAL (expr))
2765 expr = remove_conversions (DECL_INITIAL (expr), false);
2766
2767 if (TREE_CONSTANT (expr))
2768 return fold_convert (type, expr);
2769
2770 t = expr;
2771
2772 while (true)
2773 {
2774 switch (TREE_CODE (t))
2775 {
2776 case COMPONENT_REF:
2777 if (TREE_OPERAND (t, 2) != NULL_TREE)
2778 return NULL_TREE;
2779 break;
2780
2781 case ARRAY_REF:
2782 case ARRAY_RANGE_REF:
2783 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2784 || TREE_OPERAND (t, 2) != NULL_TREE
2785 || TREE_OPERAND (t, 3) != NULL_TREE)
2786 return NULL_TREE;
2787 break;
2788
2789 case BIT_FIELD_REF:
2790 case VIEW_CONVERT_EXPR:
2791 case REALPART_EXPR:
2792 case IMAGPART_EXPR:
2793 break;
2794
2795 case INDIRECT_REF:
2796 if (!TREE_READONLY (t)
2797 || TREE_SIDE_EFFECTS (t)
2798 || !TREE_THIS_NOTRAP (t))
2799 return NULL_TREE;
2800 break;
2801
2802 default:
2803 goto object;
2804 }
2805
2806 t = TREE_OPERAND (t, 0);
2807 }
2808
2809 object:
2810 if (TREE_SIDE_EFFECTS (t))
2811 return NULL_TREE;
2812
2813 if (TREE_CODE (t) == CONST_DECL
2814 && (DECL_EXTERNAL (t)
2815 || decl_function_context (t) != current_function_decl))
2816 return fold_convert (type, expr);
2817
2818 if (!TREE_READONLY (t))
2819 return NULL_TREE;
2820
2821 if (TREE_CODE (t) == CONSTRUCTOR || TREE_CODE (t) == PARM_DECL)
2822 return fold_convert (type, expr);
2823
2824 if (TREE_CODE (t) == VAR_DECL
2825 && (DECL_EXTERNAL (t)
2826 || decl_function_context (t) != current_function_decl))
2827 return fold_convert (type, expr);
2828
2829 return NULL_TREE;
2830 }