1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2011, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "toplev.h"
33 #include "ggc.h"
34 #include "output.h"
35 #include "tree-inline.h"
36
37 #include "ada.h"
38 #include "types.h"
39 #include "atree.h"
40 #include "elists.h"
41 #include "namet.h"
42 #include "nlists.h"
43 #include "snames.h"
44 #include "stringt.h"
45 #include "uintp.h"
46 #include "fe.h"
47 #include "sinfo.h"
48 #include "einfo.h"
49 #include "ada-tree.h"
50 #include "gigi.h"
51
52 /* Return the base type of TYPE. */
53
54 tree
55 get_base_type (tree type)
56 {
57 if (TREE_CODE (type) == RECORD_TYPE
58 && TYPE_JUSTIFIED_MODULAR_P (type))
59 type = TREE_TYPE (TYPE_FIELDS (type));
60
61 while (TREE_TYPE (type)
62 && (TREE_CODE (type) == INTEGER_TYPE
63 || TREE_CODE (type) == REAL_TYPE))
64 type = TREE_TYPE (type);
65
66 return type;
67 }
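/* Illustrative note added for exposition (not part of the original file):
   in gigi, an integral or real subtype points to its ancestor type via
   TREE_TYPE, so for a chain built from e.g.

     subtype S is Integer range 1 .. 10;

   get_base_type applied to the tree for S walks the TREE_TYPE links until
   it reaches the root of the chain, i.e. the base Integer type.  */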
68 \f
69 /* EXP is a GCC tree representing an address. See if we can find how
70 strictly the object at that address is aligned. Return that alignment
71 in bits. If we don't know anything about the alignment, return 0. */
72
73 unsigned int
74 known_alignment (tree exp)
75 {
76 unsigned int this_alignment;
77 unsigned int lhs, rhs;
78
79 switch (TREE_CODE (exp))
80 {
81 CASE_CONVERT:
82 case VIEW_CONVERT_EXPR:
83 case NON_LVALUE_EXPR:
84 /* Conversions between pointers and integers don't change the alignment
85 of the underlying object. */
86 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
87 break;
88
89 case COMPOUND_EXPR:
90 /* The value of a COMPOUND_EXPR is that of its second operand. */
91 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
92 break;
93
94 case PLUS_EXPR:
95 case MINUS_EXPR:
96 /* If two addresses are added, the alignment of the result is the
97 minimum of the two alignments. */
98 lhs = known_alignment (TREE_OPERAND (exp, 0));
99 rhs = known_alignment (TREE_OPERAND (exp, 1));
100 this_alignment = MIN (lhs, rhs);
101 break;
102
103 case POINTER_PLUS_EXPR:
104 lhs = known_alignment (TREE_OPERAND (exp, 0));
105 rhs = known_alignment (TREE_OPERAND (exp, 1));
106 /* If we don't know the alignment of the offset, we assume that
107 of the base. */
108 if (rhs == 0)
109 this_alignment = lhs;
110 else
111 this_alignment = MIN (lhs, rhs);
112 break;
113
114 case COND_EXPR:
115 /* If there is a choice between two values, use the smallest one. */
116 lhs = known_alignment (TREE_OPERAND (exp, 1));
117 rhs = known_alignment (TREE_OPERAND (exp, 2));
118 this_alignment = MIN (lhs, rhs);
119 break;
120
121 case INTEGER_CST:
122 {
123 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
124 /* The first part of this represents the lowest bit in the constant,
125 but it is originally in bytes, not bits. */
126 this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
127 }
128 break;
129
130 case MULT_EXPR:
131 /* If we know the alignment of just one side, use it. Otherwise,
132 use the product of the alignments. */
133 lhs = known_alignment (TREE_OPERAND (exp, 0));
134 rhs = known_alignment (TREE_OPERAND (exp, 1));
135
136 if (lhs == 0)
137 this_alignment = rhs;
138 else if (rhs == 0)
139 this_alignment = lhs;
140 else
141 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
142 break;
143
144 case BIT_AND_EXPR:
145 /* A bit-and expression is as aligned as the maximum alignment of the
146 operands. We typically get here for a complex lhs and a constant
147 negative power of two on the rhs to force an explicit alignment, so
148 don't bother looking at the lhs. */
149 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
150 break;
151
152 case ADDR_EXPR:
153 this_alignment = expr_align (TREE_OPERAND (exp, 0));
154 break;
155
156 case CALL_EXPR:
157 {
158 tree t = maybe_inline_call_in_expr (exp);
159 if (t)
160 return known_alignment (t);
161 }
162
163 /* Fall through... */
164
165 default:
166 /* For other pointer expressions, we assume that the pointed-to object
167 is at least as aligned as the pointed-to type. Beware that we can
168 have a dummy type here (e.g. a Taft Amendment type), for which the
169 alignment is meaningless and should be ignored. */
170 if (POINTER_TYPE_P (TREE_TYPE (exp))
171 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
172 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
173 else
174 this_alignment = 0;
175 break;
176 }
177
178 return this_alignment;
179 }
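/* Illustrative sketch added for exposition (not part of the original file):
   the INTEGER_CST case above relies on (c & -c) isolating the lowest set
   bit of the constant, i.e. the largest power of two dividing it, which
   bounds the alignment of a literal address.  Shown in plain C assuming
   BITS_PER_UNIT == 8; like the real code, the result is capped.  */

static unsigned int
example_const_alignment (unsigned long long c, unsigned int biggest_alignment)
{
  unsigned long long lowest_set_bit = c & -c;     /* 0 when c == 0 */
  unsigned long long align = 8 * lowest_set_bit;  /* bytes -> bits */
  return align < biggest_alignment
         ? (unsigned int) align : biggest_alignment;
}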
180 \f
181 /* We have a comparison or assignment operation on two types, T1 and T2, which
182 are either both array types or both record types. T1 is assumed to be for
183 the left hand side operand, and T2 for the right hand side. Return the
184 type that both operands should be converted to for the operation, if any.
185 Otherwise return zero. */
186
187 static tree
188 find_common_type (tree t1, tree t2)
189 {
190 /* ??? As of today, various constructs lead here with types of different
191 sizes even when both are constant (e.g. tagged types, packable vs regular
192 component types, padded vs unpadded types, ...). While some of these
193 would better be handled upstream (types should be made consistent before
194 calling into build_binary_op), some others are really expected and we
195 have to be careful. */
196
197 /* We must avoid writing more than what the target can hold if this is for
198 an assignment (the case of tagged types is handled in build_binary_op),
199 so we use the lhs type if it is known to be smaller, or is of constant
200 size when the rhs type is not, whatever the modes. We also force t1 when
201 both sizes are constant and equal, to minimize view conversions on the
202 lhs of an assignment, except for record types with a variant part on the
203 lhs but not on the rhs, to make the conversion simpler. */
204 if (TREE_CONSTANT (TYPE_SIZE (t1))
205 && (!TREE_CONSTANT (TYPE_SIZE (t2))
206 || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
207 || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
208 && !(TREE_CODE (t1) == RECORD_TYPE
209 && TREE_CODE (t2) == RECORD_TYPE
210 && get_variant_part (t1) != NULL_TREE
211 && get_variant_part (t2) == NULL_TREE))))
212 return t1;
213
214 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
215 that we will not have any alignment problems since, if we did, the
216 non-BLKmode type could not have been used. */
217 if (TYPE_MODE (t1) != BLKmode)
218 return t1;
219
220 /* If the rhs type is of constant size, use it whatever the modes. At
221 this point it is known to be smaller than the lhs type, or of
222 constant size while the lhs type is not. */
223 if (TREE_CONSTANT (TYPE_SIZE (t2)))
224 return t2;
225
226 /* Otherwise, if the rhs type is non-BLKmode, use it. */
227 if (TYPE_MODE (t2) != BLKmode)
228 return t2;
229
230 /* In this case, both types have variable size and BLKmode. It's
231 probably best to leave the "type mismatch" because changing it
232 could cause a bad self-referential reference. */
233 return NULL_TREE;
234 }
235 \f
236 /* Return an expression tree representing an equality comparison of A1 and A2,
237 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
238
239 Two arrays are equal in one of two ways: (1) if both have zero length in
240 some dimension (not necessarily the same dimension) or (2) if the lengths
241 in each dimension are equal and the data is equal. We perform the length
242 tests in as efficient a manner as possible. */
243
244 static tree
245 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
246 {
247 tree result = convert (result_type, boolean_true_node);
248 tree a1_is_null = convert (result_type, boolean_false_node);
249 tree a2_is_null = convert (result_type, boolean_false_node);
250 tree t1 = TREE_TYPE (a1);
251 tree t2 = TREE_TYPE (a2);
252 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
253 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
254 bool length_zero_p = false;
255
256 /* If either operand has side-effects, they have to be evaluated only once
257 in spite of the multiple references to the operand in the comparison. */
258 if (a1_side_effects_p)
259 a1 = gnat_protect_expr (a1);
260
261 if (a2_side_effects_p)
262 a2 = gnat_protect_expr (a2);
263
264 /* Process each dimension separately and compare the lengths. If any
265 dimension has a length known to be zero, set LENGTH_ZERO_P to true
266 in order to suppress the comparison of the data at the end. */
267 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
268 {
269 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
270 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
271 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
272 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
273 tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
274 size_one_node);
275 tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
276 size_one_node);
277 tree comparison, this_a1_is_null, this_a2_is_null;
278
279 /* If the length of the first array is a constant, swap our operands
280 unless the length of the second array is the constant zero. */
281 if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
282 {
283 tree tem;
284 bool btem;
285
286 tem = a1, a1 = a2, a2 = tem;
287 tem = t1, t1 = t2, t2 = tem;
288 tem = lb1, lb1 = lb2, lb2 = tem;
289 tem = ub1, ub1 = ub2, ub2 = tem;
290 tem = length1, length1 = length2, length2 = tem;
291 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
292 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
293 a2_side_effects_p = btem;
294 }
295
296 /* If the length of the second array is the constant zero, we can just
297 use the original stored bounds for the first array and see whether
298 last < first holds. */
299 if (integer_zerop (length2))
300 {
301 length_zero_p = true;
302
303 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
304 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
305
306 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
307 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
308 if (EXPR_P (comparison))
309 SET_EXPR_LOCATION (comparison, loc);
310
311 this_a1_is_null = comparison;
312 this_a2_is_null = convert (result_type, boolean_true_node);
313 }
314
315 /* Otherwise, if the length is some other constant value, we know that
316 this dimension in the second array cannot be superflat, so we can
317 just use its length computed from the actual stored bounds. */
318 else if (TREE_CODE (length2) == INTEGER_CST)
319 {
320 tree bt;
321
322 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
323 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
324 /* Note that we know that UB2 and LB2 are constant and hence
325 cannot contain a PLACEHOLDER_EXPR. */
326 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
327 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
328 bt = get_base_type (TREE_TYPE (ub1));
329
330 comparison
331 = fold_build2_loc (loc, EQ_EXPR, result_type,
332 build_binary_op (MINUS_EXPR, bt, ub1, lb1),
333 build_binary_op (MINUS_EXPR, bt, ub2, lb2));
334 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
335 if (EXPR_P (comparison))
336 SET_EXPR_LOCATION (comparison, loc);
337
338 this_a1_is_null
339 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
340
341 this_a2_is_null = convert (result_type, boolean_false_node);
342 }
343
344 /* Otherwise, compare the computed lengths. */
345 else
346 {
347 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
348 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
349
350 comparison
351 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
352
353 /* If the length expression is of the form (cond ? val : 0), assume
354 that cond is equivalent to (length != 0). That's guaranteed by
355 construction of the array types in gnat_to_gnu_entity. */
356 if (TREE_CODE (length1) == COND_EXPR
357 && integer_zerop (TREE_OPERAND (length1, 2)))
358 this_a1_is_null
359 = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
360 else
361 this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
362 length1, size_zero_node);
363
364 /* Likewise for the second array. */
365 if (TREE_CODE (length2) == COND_EXPR
366 && integer_zerop (TREE_OPERAND (length2, 2)))
367 this_a2_is_null
368 = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
369 else
370 this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
371 length2, size_zero_node);
372 }
373
374 /* Append expressions for this dimension to the final expressions. */
375 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
376 result, comparison);
377
378 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
379 this_a1_is_null, a1_is_null);
380
381 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
382 this_a2_is_null, a2_is_null);
383
384 t1 = TREE_TYPE (t1);
385 t2 = TREE_TYPE (t2);
386 }
387
388 /* Unless the length of some dimension is known to be zero, compare the
389 data in the array. */
390 if (!length_zero_p)
391 {
392 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
393 tree comparison;
394
395 if (type)
396 {
397 a1 = convert (type, a1),
398 a2 = convert (type, a2);
399 }
400
401 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
402
403 result
404 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
405 }
406
407 /* The result is also true if both sizes are zero. */
408 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
409 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
410 a1_is_null, a2_is_null),
411 result);
412
413 /* If either operand has side-effects, they have to be evaluated before
414 starting the comparison above, since the place where they would
415 otherwise be evaluated could be wrong. */
416 if (a1_side_effects_p)
417 result = build2 (COMPOUND_EXPR, result_type, a1, result);
418
419 if (a2_side_effects_p)
420 result = build2 (COMPOUND_EXPR, result_type, a2, result);
421
422 return result;
423 }
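/* Illustrative sketch added for exposition (not part of the original file;
   memcmp is assumed available from <string.h>): the shape of the predicate
   built above, written as plain C for two one-dimensional arrays of int
   described by their bounds and data.  Two arrays are equal if both are
   empty, or if their lengths and data match.  */

struct example_array { long lb, ub; const int *data; };

static int
example_compare_arrays (struct example_array a1, struct example_array a2)
{
  long len1 = a1.ub - a1.lb + 1;
  long len2 = a2.ub - a2.lb + 1;

  if (len1 <= 0 && len2 <= 0)   /* both null in this dimension */
    return 1;
  if (len1 != len2)             /* the length comparison */
    return 0;
  return memcmp (a1.data, a2.data, (size_t) len1 * sizeof (int)) == 0;
}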
424
425 /* Return an expression tree representing an equality comparison of P1 and P2,
426 two objects of fat pointer type. The result should be of type RESULT_TYPE.
427
428 Two fat pointers are equal in one of two ways: (1) if both have a null
429 pointer to the array or (2) if they contain the same pair of pointers.
430 We perform the comparison in as efficient a manner as possible. */
431
432 static tree
433 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
434 {
435 tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
436 tree p1_array_is_null, p2_array_is_null;
437
438 /* If either operand has side-effects, they have to be evaluated only once
439 in spite of the multiple references to the operand in the comparison. */
440 p1 = gnat_protect_expr (p1);
441 p2 = gnat_protect_expr (p2);
442
443 /* The constant folder doesn't fold fat pointer types, so we do it here. */
444 if (TREE_CODE (p1) == CONSTRUCTOR)
445 p1_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 0)->value;
446 else
447 p1_array = build_component_ref (p1, NULL_TREE,
448 TYPE_FIELDS (TREE_TYPE (p1)), true);
449
450 p1_array_is_null
451 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
452 fold_convert_loc (loc, TREE_TYPE (p1_array),
453 null_pointer_node));
454
455 if (TREE_CODE (p2) == CONSTRUCTOR)
456 p2_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 0)->value;
457 else
458 p2_array = build_component_ref (p2, NULL_TREE,
459 TYPE_FIELDS (TREE_TYPE (p2)), true);
460
461 p2_array_is_null
462 = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
463 fold_convert_loc (loc, TREE_TYPE (p2_array),
464 null_pointer_node));
465
466 /* If one of the pointers to the array is null, just compare the other. */
467 if (integer_zerop (p1_array))
468 return p2_array_is_null;
469 else if (integer_zerop (p2_array))
470 return p1_array_is_null;
471
472 /* Otherwise, do the fully-fledged comparison. */
473 same_array
474 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
475
476 if (TREE_CODE (p1) == CONSTRUCTOR)
477 p1_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 1)->value;
478 else
479 p1_bounds
480 = build_component_ref (p1, NULL_TREE,
481 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);
482
483 if (TREE_CODE (p2) == CONSTRUCTOR)
484 p2_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 1)->value;
485 else
486 p2_bounds
487 = build_component_ref (p2, NULL_TREE,
488 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);
489
490 same_bounds
491 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
492
493 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
494 return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
495 build_binary_op (TRUTH_ORIF_EXPR, result_type,
496 p1_array_is_null, same_bounds));
497 }
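/* Illustrative sketch added for exposition (not part of the original file):
   the predicate built above, written as plain C for a fat pointer
   represented as a { array, bounds } pair of pointers.  */

struct example_fat_ptr { const void *array; const void *bounds; };

static int
example_compare_fat_pointers (struct example_fat_ptr p1,
                              struct example_fat_ptr p2)
{
  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS),
     exactly as in the comment at the end of the function above.  */
  return p1.array == p2.array
         && (p1.array == 0 || p1.bounds == p2.bounds);
}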
498 \f
499 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
500 type TYPE. We know that TYPE is a modular type with a nonbinary
501 modulus. */
502
503 static tree
504 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
505 tree rhs)
506 {
507 tree modulus = TYPE_MODULUS (type);
508 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
509 unsigned int precision;
510 bool unsignedp = true;
511 tree op_type = type;
512 tree result;
513
514 /* If this is an addition of a constant, convert it to a subtraction
515 of a constant since we can do that faster. */
516 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
517 {
518 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
519 op_code = MINUS_EXPR;
520 }
521
522 /* For the logical operations, we only need PRECISION bits. For
523 addition and subtraction, we need one more and for multiplication we
524 need twice as many. But we never want to make a size smaller than
525 our size. */
526 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
527 needed_precision += 1;
528 else if (op_code == MULT_EXPR)
529 needed_precision *= 2;
530
531 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
532
533 /* Unsigned will do for everything but subtraction. */
534 if (op_code == MINUS_EXPR)
535 unsignedp = false;
536
537 /* If our type is the wrong signedness or isn't wide enough, make a new
538 type and convert both our operands to it. */
539 if (TYPE_PRECISION (op_type) < precision
540 || TYPE_UNSIGNED (op_type) != unsignedp)
541 {
542 /* Copy the node so we ensure it can be modified to make it modular. */
543 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
544 modulus = convert (op_type, modulus);
545 SET_TYPE_MODULUS (op_type, modulus);
546 TYPE_MODULAR_P (op_type) = 1;
547 lhs = convert (op_type, lhs);
548 rhs = convert (op_type, rhs);
549 }
550
551 /* Do the operation, then we'll fix it up. */
552 result = fold_build2 (op_code, op_type, lhs, rhs);
553
554 /* For multiplication, we have no choice but to do a full modulus
555 operation. However, we want to do this in the narrowest
556 possible size. */
557 if (op_code == MULT_EXPR)
558 {
559 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
560 modulus = convert (div_type, modulus);
561 SET_TYPE_MODULUS (div_type, modulus);
562 TYPE_MODULAR_P (div_type) = 1;
563 result = convert (op_type,
564 fold_build2 (TRUNC_MOD_EXPR, div_type,
565 convert (div_type, result), modulus));
566 }
567
568 /* For subtraction, add the modulus back if we are negative. */
569 else if (op_code == MINUS_EXPR)
570 {
571 result = gnat_protect_expr (result);
572 result = fold_build3 (COND_EXPR, op_type,
573 fold_build2 (LT_EXPR, boolean_type_node, result,
574 convert (op_type, integer_zero_node)),
575 fold_build2 (PLUS_EXPR, op_type, result, modulus),
576 result);
577 }
578
579 /* For the other operations, subtract the modulus if we are >= it. */
580 else
581 {
582 result = gnat_protect_expr (result);
583 result = fold_build3 (COND_EXPR, op_type,
584 fold_build2 (GE_EXPR, boolean_type_node,
585 result, modulus),
586 fold_build2 (MINUS_EXPR, op_type,
587 result, modulus),
588 result);
589 }
590
591 return convert (type, result);
592 }
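/* Illustrative sketch added for exposition (not part of the original file):
   the rewriting performed above, in plain C for a nonbinary modulus M.
   Adding a constant K is turned into subtracting (M - K), and the "add
   the modulus back if we are negative" fixup replaces a costly division.
   Assumes 0 <= lhs < m and 0 < k < m.  */

static int
example_nonbinary_mod_add (int lhs, int k, int m)
{
  int r = lhs - (m - k);      /* lhs + k - m, always in (-m, m) */
  return r < 0 ? r + m : r;   /* fixup instead of a real modulo */
}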
593 \f
594 /* This page contains routines that implement the Ada semantics with regard
595 to atomic objects. They are fully piggybacked on the middle-end support
596 for atomic loads and stores.
597
598 *** Memory barriers and volatile objects ***
599
600 We implement the weakened form of the C.6(16) clause that was introduced
601 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
602 implementable without significant performance hits on modern platforms.
603
604 We also take advantage of the requirements imposed on shared variables by
605 9.10 (conditions for sequential actions) to have non-erroneous execution
606 and consider that C.6(16) and C.6(17) only prescribe a uniform order of
607 volatile updates with regard to sequential actions, i.e. with regard to
608 reads or updates of atomic objects.
609
610 As such, an update of an atomic object by a task requires that all earlier
611 accesses to volatile objects have completed. Similarly, later accesses to
612 volatile objects cannot be reordered before the update of the atomic object.
613 So, memory barriers both before and after the atomic update are needed.
614
615 For a read of an atomic object, to avoid seeing writes of volatile objects
616 by a task earlier than by the other tasks, a memory barrier is needed before
617 the atomic read. Finally, to avoid reordering later reads or updates of
618 volatile objects to before the atomic read, a barrier is needed after the
619 atomic read.
620
621 So, memory barriers are needed before and after atomic reads and updates.
622 And, in order to simplify the implementation, we use full memory barriers
623 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
624
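/* Illustrative sketch added for exposition (not part of the original file):
   at the user level, the calls built by build_atomic_load and
   build_atomic_store below correspond to the GCC __atomic builtins with
   __ATOMIC_SEQ_CST (the user-level counterpart of MEMMODEL_SEQ_CST),
   giving the full barriers on both sides of each access described above.
   Shown for a 4-byte object.  */

static unsigned int
example_atomic_read (volatile unsigned int *obj)
{
  /* Full barrier before and after the load (sequential consistency).  */
  return __atomic_load_4 (obj, __ATOMIC_SEQ_CST);
}

static void
example_atomic_write (volatile unsigned int *obj, unsigned int val)
{
  /* Full barrier before and after the store (sequential consistency).  */
  __atomic_store_4 (obj, val, __ATOMIC_SEQ_CST);
}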
625 /* Return the size of TYPE, which must be a positive power of 2. */
626
627 static unsigned int
628 resolve_atomic_size (tree type)
629 {
630 unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
631
632 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
633 return size;
634
635 /* We shouldn't reach here without having already detected that the size
636 isn't compatible with an atomic access. */
637 gcc_assert (Serious_Errors_Detected);
638
639 return 0;
640 }
641
642 /* Build an atomic load for the underlying atomic object in SRC. */
643
644 tree
645 build_atomic_load (tree src)
646 {
647 tree ptr_type
648 = build_pointer_type
649 (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
650 tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
651 tree orig_src = src;
652 tree type = TREE_TYPE (src);
653 tree t, val;
654 unsigned int size;
655 int fncode;
656
657 src = remove_conversions (src, false);
658 size = resolve_atomic_size (TREE_TYPE (src));
659 if (size == 0)
660 return orig_src;
661
662 fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
663 t = builtin_decl_implicit ((enum built_in_function) fncode);
664
665 src = build_unary_op (ADDR_EXPR, ptr_type, src);
666 val = build_call_expr (t, 2, src, mem_model);
667
668 return unchecked_convert (type, val, true);
669 }
670
671 /* Build an atomic store from SRC to the underlying atomic object in DEST. */
672
673 tree
674 build_atomic_store (tree dest, tree src)
675 {
676 tree ptr_type
677 = build_pointer_type
678 (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
679 tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
680 tree orig_dest = dest;
681 tree t, int_type;
682 unsigned int size;
683 int fncode;
684
685 dest = remove_conversions (dest, false);
686 size = resolve_atomic_size (TREE_TYPE (dest));
687 if (size == 0)
688 return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
689
690 fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
691 t = builtin_decl_implicit ((enum built_in_function) fncode);
692 int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
693
694 dest = build_unary_op (ADDR_EXPR, ptr_type, dest);
695 src = unchecked_convert (int_type, src, true);
696
697 return build_call_expr (t, 3, dest, src, mem_model);
698 }
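/* Illustrative note added for exposition (not part of the original file):
   in both builders above, since the only valid sizes are 1, 2, 4, 8 and
   16 bytes, the expression exact_log2 (size) + 1 selects the _1, _2, _4,
   _8 or _16 variant immediately following the _N entry in the built-in
   function enumeration.  */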
699 \f
700 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
701 desired for the result. Usually the operation is to be performed
702 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
703 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
704 case the type to be used will be derived from the operands.
705
706 This function is very much unlike the ones for C and C++ since we
707 have already done any type conversion and matching required. All we
708 have to do here is validate the work done by SEM and handle subtypes. */
709
710 tree
711 build_binary_op (enum tree_code op_code, tree result_type,
712 tree left_operand, tree right_operand)
713 {
714 tree left_type = TREE_TYPE (left_operand);
715 tree right_type = TREE_TYPE (right_operand);
716 tree left_base_type = get_base_type (left_type);
717 tree right_base_type = get_base_type (right_type);
718 tree operation_type = result_type;
719 tree best_type = NULL_TREE;
720 tree modulus, result;
721 bool has_side_effects = false;
722
723 if (operation_type
724 && TREE_CODE (operation_type) == RECORD_TYPE
725 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
726 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
727
728 if (operation_type
729 && TREE_CODE (operation_type) == INTEGER_TYPE
730 && TYPE_EXTRA_SUBTYPE_P (operation_type))
731 operation_type = get_base_type (operation_type);
732
733 modulus = (operation_type
734 && TREE_CODE (operation_type) == INTEGER_TYPE
735 && TYPE_MODULAR_P (operation_type)
736 ? TYPE_MODULUS (operation_type) : NULL_TREE);
737
738 switch (op_code)
739 {
740 case INIT_EXPR:
741 case MODIFY_EXPR:
742 #ifdef ENABLE_CHECKING
743 gcc_assert (result_type == NULL_TREE);
744 #endif
745 /* If there were integral or pointer conversions on the LHS, remove
746 them; we'll be putting them back below if needed. Likewise for
747 conversions between array and record types, except for justified
748 modular types. But don't do this if the right operand is not
749 BLKmode (for packed arrays) unless we are not changing the mode. */
750 while ((CONVERT_EXPR_P (left_operand)
751 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
752 && (((INTEGRAL_TYPE_P (left_type)
753 || POINTER_TYPE_P (left_type))
754 && (INTEGRAL_TYPE_P (TREE_TYPE
755 (TREE_OPERAND (left_operand, 0)))
756 || POINTER_TYPE_P (TREE_TYPE
757 (TREE_OPERAND (left_operand, 0)))))
758 || (((TREE_CODE (left_type) == RECORD_TYPE
759 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
760 || TREE_CODE (left_type) == ARRAY_TYPE)
761 && ((TREE_CODE (TREE_TYPE
762 (TREE_OPERAND (left_operand, 0)))
763 == RECORD_TYPE)
764 || (TREE_CODE (TREE_TYPE
765 (TREE_OPERAND (left_operand, 0)))
766 == ARRAY_TYPE))
767 && (TYPE_MODE (right_type) == BLKmode
768 || (TYPE_MODE (left_type)
769 == TYPE_MODE (TREE_TYPE
770 (TREE_OPERAND
771 (left_operand, 0))))))))
772 {
773 left_operand = TREE_OPERAND (left_operand, 0);
774 left_type = TREE_TYPE (left_operand);
775 }
776
777 /* If a class-wide type may be involved, force use of the RHS type. */
778 if ((TREE_CODE (right_type) == RECORD_TYPE
779 || TREE_CODE (right_type) == UNION_TYPE)
780 && TYPE_ALIGN_OK (right_type))
781 operation_type = right_type;
782
783 /* If we are copying between padded objects with compatible types, use
784 the padded view of the objects; this is very likely more efficient.
785 Likewise for a padded object that is assigned a constructor, if we
786 can convert the constructor to the inner type, to avoid putting a
787 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
788 actually copied anything. */
789 else if (TYPE_IS_PADDING_P (left_type)
790 && TREE_CONSTANT (TYPE_SIZE (left_type))
791 && ((TREE_CODE (right_operand) == COMPONENT_REF
792 && TYPE_IS_PADDING_P
793 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
794 && gnat_types_compatible_p
795 (left_type,
796 TREE_TYPE (TREE_OPERAND (right_operand, 0))))
797 || (TREE_CODE (right_operand) == CONSTRUCTOR
798 && !CONTAINS_PLACEHOLDER_P
799 (DECL_SIZE (TYPE_FIELDS (left_type)))))
800 && !integer_zerop (TYPE_SIZE (right_type)))
801 operation_type = left_type;
802
803 /* If we have a call to a function that returns an unconstrained type
804 with default discriminant on the RHS, use the RHS type (which is
805 padded) as we cannot compute the size of the actual assignment. */
806 else if (TREE_CODE (right_operand) == CALL_EXPR
807 && TYPE_IS_PADDING_P (right_type)
808 && CONTAINS_PLACEHOLDER_P
809 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
810 operation_type = right_type;
811
812 /* Find the best type to use for copying between aggregate types. */
813 else if (((TREE_CODE (left_type) == ARRAY_TYPE
814 && TREE_CODE (right_type) == ARRAY_TYPE)
815 || (TREE_CODE (left_type) == RECORD_TYPE
816 && TREE_CODE (right_type) == RECORD_TYPE))
817 && (best_type = find_common_type (left_type, right_type)))
818 operation_type = best_type;
819
820 /* Otherwise use the LHS type. */
821 else
822 operation_type = left_type;
823
824 /* Ensure everything on the LHS is valid. If we have a field reference,
825 strip anything that get_inner_reference can handle. Then remove any
826 conversions between types having the same code and mode. And mark
827 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
828 either an INDIRECT_REF, a NULL_EXPR or a DECL node. */
829 result = left_operand;
830 while (true)
831 {
832 tree restype = TREE_TYPE (result);
833
834 if (TREE_CODE (result) == COMPONENT_REF
835 || TREE_CODE (result) == ARRAY_REF
836 || TREE_CODE (result) == ARRAY_RANGE_REF)
837 while (handled_component_p (result))
838 result = TREE_OPERAND (result, 0);
839 else if (TREE_CODE (result) == REALPART_EXPR
840 || TREE_CODE (result) == IMAGPART_EXPR
841 || (CONVERT_EXPR_P (result)
842 && (((TREE_CODE (restype)
843 == TREE_CODE (TREE_TYPE
844 (TREE_OPERAND (result, 0))))
845 && (TYPE_MODE (TREE_TYPE
846 (TREE_OPERAND (result, 0)))
847 == TYPE_MODE (restype)))
848 || TYPE_ALIGN_OK (restype))))
849 result = TREE_OPERAND (result, 0);
850 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
851 {
852 TREE_ADDRESSABLE (result) = 1;
853 result = TREE_OPERAND (result, 0);
854 }
855 else
856 break;
857 }
858
859 gcc_assert (TREE_CODE (result) == INDIRECT_REF
860 || TREE_CODE (result) == NULL_EXPR
861 || DECL_P (result));
862
863 /* Convert the right operand to the operation type unless it is
864 already of the correct type or the type involves a
865 placeholder, since the RHS may not have the same record type. */
866 if (operation_type != right_type
867 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
868 {
869 right_operand = convert (operation_type, right_operand);
870 right_type = operation_type;
871 }
872
873 /* If the left operand is not of the same type as the operation
874 type, wrap it up in a VIEW_CONVERT_EXPR. */
875 if (left_type != operation_type)
876 left_operand = unchecked_convert (operation_type, left_operand, false);
877
878 has_side_effects = true;
879 modulus = NULL_TREE;
880 break;
881
882 case ARRAY_REF:
883 if (!operation_type)
884 operation_type = TREE_TYPE (left_type);
885
886 /* ... fall through ... */
887
888 case ARRAY_RANGE_REF:
889 /* First look through conversions between type variants. Note that
890 this changes neither the operation type nor the type domain. */
891 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
892 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
893 == TYPE_MAIN_VARIANT (left_type))
894 {
895 left_operand = TREE_OPERAND (left_operand, 0);
896 left_type = TREE_TYPE (left_operand);
897 }
898
899 /* For a range, make sure the element type is consistent. */
900 if (op_code == ARRAY_RANGE_REF
901 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
902 operation_type = build_array_type (TREE_TYPE (left_type),
903 TYPE_DOMAIN (operation_type));
904
905 /* Then convert the right operand to its base type. This will prevent
906 unneeded sign conversions when sizetype is wider than integer. */
907 right_operand = convert (right_base_type, right_operand);
908 right_operand = convert_to_index_type (right_operand);
909 modulus = NULL_TREE;
910 break;
911
912 case TRUTH_ANDIF_EXPR:
913 case TRUTH_ORIF_EXPR:
914 case TRUTH_AND_EXPR:
915 case TRUTH_OR_EXPR:
916 case TRUTH_XOR_EXPR:
917 #ifdef ENABLE_CHECKING
918 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
919 #endif
920 operation_type = left_base_type;
921 left_operand = convert (operation_type, left_operand);
922 right_operand = convert (operation_type, right_operand);
923 break;
924
925 case GE_EXPR:
926 case LE_EXPR:
927 case GT_EXPR:
928 case LT_EXPR:
929 case EQ_EXPR:
930 case NE_EXPR:
931 #ifdef ENABLE_CHECKING
932 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
933 #endif
934 /* If either operand is a NULL_EXPR, just return a new one. */
935 if (TREE_CODE (left_operand) == NULL_EXPR)
936 return build2 (op_code, result_type,
937 build1 (NULL_EXPR, integer_type_node,
938 TREE_OPERAND (left_operand, 0)),
939 integer_zero_node);
940
941 else if (TREE_CODE (right_operand) == NULL_EXPR)
942 return build2 (op_code, result_type,
943 build1 (NULL_EXPR, integer_type_node,
944 TREE_OPERAND (right_operand, 0)),
945 integer_zero_node);
946
947 /* If either object is a justified modular type, get the
948 field from within. */
949 if (TREE_CODE (left_type) == RECORD_TYPE
950 && TYPE_JUSTIFIED_MODULAR_P (left_type))
951 {
952 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
953 left_operand);
954 left_type = TREE_TYPE (left_operand);
955 left_base_type = get_base_type (left_type);
956 }
957
958 if (TREE_CODE (right_type) == RECORD_TYPE
959 && TYPE_JUSTIFIED_MODULAR_P (right_type))
960 {
961 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
962 right_operand);
963 right_type = TREE_TYPE (right_operand);
964 right_base_type = get_base_type (right_type);
965 }
966
967 /* If both objects are arrays, compare them specially. */
968 if ((TREE_CODE (left_type) == ARRAY_TYPE
969 || (TREE_CODE (left_type) == INTEGER_TYPE
970 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
971 && (TREE_CODE (right_type) == ARRAY_TYPE
972 || (TREE_CODE (right_type) == INTEGER_TYPE
973 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
974 {
975 result = compare_arrays (input_location,
976 result_type, left_operand, right_operand);
977 if (op_code == NE_EXPR)
978 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
979 else
980 gcc_assert (op_code == EQ_EXPR);
981
982 return result;
983 }
984
985 /* Otherwise, the base types must be the same, unless they are both fat
986 pointer types or record types. In the latter case, use the best type
987 and convert both operands to that type. */
988 if (left_base_type != right_base_type)
989 {
990 if (TYPE_IS_FAT_POINTER_P (left_base_type)
991 && TYPE_IS_FAT_POINTER_P (right_base_type))
992 {
993 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
994 == TYPE_MAIN_VARIANT (right_base_type));
995 best_type = left_base_type;
996 }
997
998 else if (TREE_CODE (left_base_type) == RECORD_TYPE
999 && TREE_CODE (right_base_type) == RECORD_TYPE)
1000 {
1001 /* The only way this is permitted is if both types have the same
1002 name. In that case, one of them must not be self-referential.
1003 Use it as the best type. Even better with a fixed size. */
1004 gcc_assert (TYPE_NAME (left_base_type)
1005 && TYPE_NAME (left_base_type)
1006 == TYPE_NAME (right_base_type));
1007
1008 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1009 best_type = left_base_type;
1010 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1011 best_type = right_base_type;
1012 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1013 best_type = left_base_type;
1014 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1015 best_type = right_base_type;
1016 else
1017 gcc_unreachable ();
1018 }
1019
1020 else
1021 gcc_unreachable ();
1022
1023 left_operand = convert (best_type, left_operand);
1024 right_operand = convert (best_type, right_operand);
1025 }
1026 else
1027 {
1028 left_operand = convert (left_base_type, left_operand);
1029 right_operand = convert (right_base_type, right_operand);
1030 }
1031
1032 /* If both objects are fat pointers, compare them specially. */
1033 if (TYPE_IS_FAT_POINTER_P (left_base_type))
1034 {
1035 result
1036 = compare_fat_pointers (input_location,
1037 result_type, left_operand, right_operand);
1038 if (op_code == NE_EXPR)
1039 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1040 else
1041 gcc_assert (op_code == EQ_EXPR);
1042
1043 return result;
1044 }
1045
1046 modulus = NULL_TREE;
1047 break;
1048
1049 case LSHIFT_EXPR:
1050 case RSHIFT_EXPR:
1051 case LROTATE_EXPR:
1052 case RROTATE_EXPR:
1053 /* The RHS of a shift can be any type. Also, ignore any modulus
1054 (we used to abort, but this is needed for unchecked conversion
1055 to modular types). Otherwise, processing is the same as normal. */
1056 gcc_assert (operation_type == left_base_type);
1057 modulus = NULL_TREE;
1058 left_operand = convert (operation_type, left_operand);
1059 break;
1060
1061 case BIT_AND_EXPR:
1062 case BIT_IOR_EXPR:
1063 case BIT_XOR_EXPR:
1064 /* For binary modulus, if the inputs are in range, so are the
1065 outputs. */
1066 if (modulus && integer_pow2p (modulus))
1067 modulus = NULL_TREE;
1068 goto common;
1069
1070 case COMPLEX_EXPR:
1071 gcc_assert (TREE_TYPE (result_type) == left_base_type
1072 && TREE_TYPE (result_type) == right_base_type);
1073 left_operand = convert (left_base_type, left_operand);
1074 right_operand = convert (right_base_type, right_operand);
1075 break;
1076
1077 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1078 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1079 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1080 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1081 /* These always produce results lower than either operand. */
1082 modulus = NULL_TREE;
1083 goto common;
1084
1085 case POINTER_PLUS_EXPR:
1086 gcc_assert (operation_type == left_base_type
1087 && sizetype == right_base_type);
1088 left_operand = convert (operation_type, left_operand);
1089 right_operand = convert (sizetype, right_operand);
1090 break;
1091
1092 case PLUS_NOMOD_EXPR:
1093 case MINUS_NOMOD_EXPR:
1094 if (op_code == PLUS_NOMOD_EXPR)
1095 op_code = PLUS_EXPR;
1096 else
1097 op_code = MINUS_EXPR;
1098 modulus = NULL_TREE;
1099
1100 /* ... fall through ... */
1101
1102 case PLUS_EXPR:
1103 case MINUS_EXPR:
1104 /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE, as the
1105 other compilers do. Contrary to C, Ada doesn't allow arithmetic in
1106 these types but can generate addition/subtraction for Succ/Pred. */
1107 if (operation_type
1108 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1109 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1110 operation_type = left_base_type = right_base_type
1111 = gnat_type_for_mode (TYPE_MODE (operation_type),
1112 TYPE_UNSIGNED (operation_type));
1113
1114 /* ... fall through ... */
1115
1116 default:
1117 common:
1118 /* The result type should be the same as the base types of
1119 both operands (and they should be the same). Convert
1120 everything to the result type. */
1121
1122 gcc_assert (operation_type == left_base_type
1123 && left_base_type == right_base_type);
1124 left_operand = convert (operation_type, left_operand);
1125 right_operand = convert (operation_type, right_operand);
1126 }
1127
1128 if (modulus && !integer_pow2p (modulus))
1129 {
1130 result = nonbinary_modular_operation (op_code, operation_type,
1131 left_operand, right_operand);
1132 modulus = NULL_TREE;
1133 }
1134 /* If either operand is a NULL_EXPR, just return a new one. */
1135 else if (TREE_CODE (left_operand) == NULL_EXPR)
1136 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1137 else if (TREE_CODE (right_operand) == NULL_EXPR)
1138 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1139 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1140 result = fold (build4 (op_code, operation_type, left_operand,
1141 right_operand, NULL_TREE, NULL_TREE));
1142 else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1143 result = build2 (op_code, void_type_node, left_operand, right_operand);
1144 else
1145 result
1146 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1147
1148 if (TREE_CONSTANT (result))
1149 ;
1150 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1151 {
1152 TREE_THIS_NOTRAP (result) = 1;
1153 if (TYPE_VOLATILE (operation_type))
1154 TREE_THIS_VOLATILE (result) = 1;
1155 }
1156 else
1157 TREE_CONSTANT (result)
1158 |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1159
1160 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1161
1162 /* If we are working with modular types, perform the MOD operation
1163 if something above hasn't eliminated the need for it. */
1164 if (modulus)
1165 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1166 convert (operation_type, modulus));
1167
1168 if (result_type && result_type != operation_type)
1169 result = convert (result_type, result);
1170
1171 return result;
1172 }
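/* Illustrative note added for exposition (not part of the original file):
   typical uses elsewhere in this file pass the base type and operands
   already brought to compatible types, e.g.

     build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);

   in compare_arrays above, or NULL_TREE as RESULT_TYPE for MODIFY_EXPR
   as in build_atomic_store.  */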
1173 \f
1174 /* Similar, but for unary operations. */
1175
1176 tree
1177 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1178 {
1179 tree type = TREE_TYPE (operand);
1180 tree base_type = get_base_type (type);
1181 tree operation_type = result_type;
1182 tree result;
1183
1184 if (operation_type
1185 && TREE_CODE (operation_type) == RECORD_TYPE
1186 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1187 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1188
1189 if (operation_type
1190 && TREE_CODE (operation_type) == INTEGER_TYPE
1191 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1192 operation_type = get_base_type (operation_type);
1193
1194 switch (op_code)
1195 {
1196 case REALPART_EXPR:
1197 case IMAGPART_EXPR:
1198 if (!operation_type)
1199 result_type = operation_type = TREE_TYPE (type);
1200 else
1201 gcc_assert (result_type == TREE_TYPE (type));
1202
1203 result = fold_build1 (op_code, operation_type, operand);
1204 break;
1205
1206 case TRUTH_NOT_EXPR:
1207 #ifdef ENABLE_CHECKING
1208 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1209 #endif
1210 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1211 /* When not optimizing, fold the result as invert_truthvalue_loc
1212 doesn't fold the result of comparisons. This is intended to undo
1213 the trick used for boolean rvalues in gnat_to_gnu. */
1214 if (!optimize)
1215 result = fold (result);
1216 break;
1217
1218 case ATTR_ADDR_EXPR:
1219 case ADDR_EXPR:
1220 switch (TREE_CODE (operand))
1221 {
1222 case INDIRECT_REF:
1223 case UNCONSTRAINED_ARRAY_REF:
1224 result = TREE_OPERAND (operand, 0);
1225
1226 /* Make sure the type here is a pointer, not a reference.
1227 GCC wants pointer types for function addresses. */
1228 if (!result_type)
1229 result_type = build_pointer_type (type);
1230
1231 /* If the underlying object can alias everything, propagate the
1232 property since we are effectively retrieving the object. */
1233 if (POINTER_TYPE_P (TREE_TYPE (result))
1234 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1235 {
1236 if (TREE_CODE (result_type) == POINTER_TYPE
1237 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1238 result_type
1239 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1240 TYPE_MODE (result_type),
1241 true);
1242 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1243 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1244 result_type
1245 = build_reference_type_for_mode (TREE_TYPE (result_type),
1246 TYPE_MODE (result_type),
1247 true);
1248 }
1249 break;
1250
1251 case NULL_EXPR:
1252 result = operand;
1253 TREE_TYPE (result) = type = build_pointer_type (type);
1254 break;
1255
1256 case COMPOUND_EXPR:
1257 /* Fold a compound expression if it has unconstrained array type
1258 since the middle-end cannot handle it. But we don't do it in the
1259 general case because it may introduce aliasing issues if the
1260 first operand is an indirect assignment and the second operand
1261 the corresponding address, e.g. for an allocator. */
1262 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
1263 {
1264 result = build_unary_op (ADDR_EXPR, result_type,
1265 TREE_OPERAND (operand, 1));
1266 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1267 TREE_OPERAND (operand, 0), result);
1268 break;
1269 }
1270 goto common;
1271
1272 case ARRAY_REF:
1273 case ARRAY_RANGE_REF:
1274 case COMPONENT_REF:
1275 case BIT_FIELD_REF:
1276 /* If this is for 'Address, find the address of the prefix and add
1277 the offset to the field. Otherwise, do this the normal way. */
1278 if (op_code == ATTR_ADDR_EXPR)
1279 {
1280 HOST_WIDE_INT bitsize;
1281 HOST_WIDE_INT bitpos;
1282 tree offset, inner;
1283 enum machine_mode mode;
1284 int unsignedp, volatilep;
1285
1286 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1287 &mode, &unsignedp, &volatilep,
1288 false);
1289
1290 /* If INNER is a padding type whose field has a self-referential
1291 size, convert to that inner type. We know the offset is zero
1292 and we need to have that type visible. */
1293 if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
1294 && CONTAINS_PLACEHOLDER_P
1295 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1296 (TREE_TYPE (inner))))))
1297 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1298 inner);
1299
1300 /* Compute the offset as a byte offset from INNER. */
1301 if (!offset)
1302 offset = size_zero_node;
1303
1304 offset = size_binop (PLUS_EXPR, offset,
1305 size_int (bitpos / BITS_PER_UNIT));
1306
1307 /* Take the address of INNER, convert the offset to void *, and
1308 add them. It will later be converted to the desired result
1309 type, if any. */
1310 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1311 inner = convert (ptr_void_type_node, inner);
1312 result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1313 inner, offset);
1314 result = convert (build_pointer_type (TREE_TYPE (operand)),
1315 result);
1316 break;
1317 }
1318 goto common;
1319
1320 case CONSTRUCTOR:
1321 /* If this is just a constructor for a padded record, we can
1322 take the address of the single field and convert it to
1323 a pointer to our type. */
1324 if (TYPE_IS_PADDING_P (type))
1325 {
1326 result = VEC_index (constructor_elt,
1327 CONSTRUCTOR_ELTS (operand),
1328 0)->value;
1329 result = convert (build_pointer_type (TREE_TYPE (operand)),
1330 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1331 break;
1332 }
1333
1334 goto common;
1335
1336 case NOP_EXPR:
1337 if (AGGREGATE_TYPE_P (type)
1338 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1339 return build_unary_op (ADDR_EXPR, result_type,
1340 TREE_OPERAND (operand, 0));
1341
1342 /* ... fallthru ... */
1343
1344 case VIEW_CONVERT_EXPR:
1345 /* If this is just a variant conversion or if the conversion doesn't
1346 change the mode, get the result type from this type and go down.
1347 This is needed for conversions of CONST_DECLs, to eventually get
1348 to the address of their CORRESPONDING_VARs. */
1349 if ((TYPE_MAIN_VARIANT (type)
1350 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1351 || (TYPE_MODE (type) != BLKmode
1352 && (TYPE_MODE (type)
1353 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1354 return build_unary_op (ADDR_EXPR,
1355 (result_type ? result_type
1356 : build_pointer_type (type)),
1357 TREE_OPERAND (operand, 0));
1358 goto common;
1359
1360 case CONST_DECL:
1361 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1362
1363 /* ... fall through ... */
1364
1365 default:
1366 common:
1367
1368 /* If we are taking the address of a padded record whose field
1369 contains a template, take the address of the template. */
1370 if (TYPE_IS_PADDING_P (type)
1371 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1372 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1373 {
1374 type = TREE_TYPE (TYPE_FIELDS (type));
1375 operand = convert (type, operand);
1376 }
1377
1378 gnat_mark_addressable (operand);
1379 result = build_fold_addr_expr (operand);
1380 }
1381
1382 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1383 break;
1384
1385 case INDIRECT_REF:
1386 {
1387 tree t = remove_conversions (operand, false);
1388 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1389
1390 /* If TYPE is a thin pointer, first convert to the fat pointer. */
1391 if (TYPE_IS_THIN_POINTER_P (type)
1392 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
1393 {
1394 operand = convert
1395 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
1396 operand);
1397 type = TREE_TYPE (operand);
1398 }
1399
1400 /* If we want to refer to an unconstrained array, use the appropriate
1401 expression. But this will never survive down to the back-end. */
1402 if (TYPE_IS_FAT_POINTER_P (type))
1403 {
1404 result = build1 (UNCONSTRAINED_ARRAY_REF,
1405 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1406 TREE_READONLY (result)
1407 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1408 }
1409
1410 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1411 else if (TREE_CODE (operand) == ADDR_EXPR)
1412 result = TREE_OPERAND (operand, 0);
1413
1414 /* Otherwise, build and fold the indirect reference. */
1415 else
1416 {
1417 result = build_fold_indirect_ref (operand);
1418 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1419 }
1420
1421 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1422 {
1423 TREE_SIDE_EFFECTS (result) = 1;
1424 if (TREE_CODE (result) == INDIRECT_REF)
1425 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1426 }
1427
1428 if ((TREE_CODE (result) == INDIRECT_REF
1429 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1430 && can_never_be_null)
1431 TREE_THIS_NOTRAP (result) = 1;
1432
1433 break;
1434 }
1435
1436 case NEGATE_EXPR:
1437 case BIT_NOT_EXPR:
1438 {
1439 tree modulus = ((operation_type
1440 && TREE_CODE (operation_type) == INTEGER_TYPE
1441 && TYPE_MODULAR_P (operation_type))
1442 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1443 int mod_pow2 = modulus && integer_pow2p (modulus);
1444
1445 /* If this is a modular type, there are various possibilities
1446 depending on the operation and whether the modulus is a
1447 power of two or not. */
1448
1449 if (modulus)
1450 {
1451 gcc_assert (operation_type == base_type);
1452 operand = convert (operation_type, operand);
1453
1454 /* The fastest in the negate case for binary modulus is
1455 the straightforward code; the TRUNC_MOD_EXPR below
1456 is an AND operation. */
1457 if (op_code == NEGATE_EXPR && mod_pow2)
1458 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1459 fold_build1 (NEGATE_EXPR, operation_type,
1460 operand),
1461 modulus);
1462
1463 /* For the nonbinary negate case, return zero for zero operand,
1464 else return the modulus minus the operand. If the modulus
1465 is a power of two minus one, we can do the subtraction
1466 as an XOR since it is equivalent and faster on most machines. */
1467 else if (op_code == NEGATE_EXPR && !mod_pow2)
1468 {
1469 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1470 modulus,
1471 convert (operation_type,
1472 integer_one_node))))
1473 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1474 operand, modulus);
1475 else
1476 result = fold_build2 (MINUS_EXPR, operation_type,
1477 modulus, operand);
1478
1479 result = fold_build3 (COND_EXPR, operation_type,
1480 fold_build2 (NE_EXPR,
1481 boolean_type_node,
1482 operand,
1483 convert
1484 (operation_type,
1485 integer_zero_node)),
1486 result, operand);
1487 }
1488 else
1489 {
1490 /* For the NOT cases, we need a constant equal to
1491 the modulus minus one. For a binary modulus, we
1492 XOR against that constant; for a nonbinary modulus,
1493 we subtract the operand from it. */
1494
1495 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1496 convert (operation_type,
1497 integer_one_node));
1498
1499 if (mod_pow2)
1500 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1501 operand, cnst);
1502 else
1503 result = fold_build2 (MINUS_EXPR, operation_type,
1504 cnst, operand);
1505 }
1506
1507 break;
1508 }
1509 }
1510
1511 /* ... fall through ... */
1512
1513 default:
1514 gcc_assert (operation_type == base_type);
1515 result = fold_build1 (op_code, operation_type,
1516 convert (operation_type, operand));
1517 }
1518
1519 if (result_type && TREE_TYPE (result) != result_type)
1520 result = convert (result_type, result);
1521
1522 return result;
1523 }
1524 \f
1525 /* Similar, but for COND_EXPR. */
1526
1527 tree
1528 build_cond_expr (tree result_type, tree condition_operand,
1529 tree true_operand, tree false_operand)
1530 {
1531 bool addr_p = false;
1532 tree result;
1533
1534 /* The front-end verified that result, true and false operands have
1535 the same base type. Convert everything to the result type. */
1536 true_operand = convert (result_type, true_operand);
1537 false_operand = convert (result_type, false_operand);
1538
1539 /* If the result type is unconstrained, take the address of the operands and
1540 then dereference the result. Likewise if the result type is passed by
1541 reference, but this is natively handled in the gimplifier. */
1542 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1543 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1544 {
1545 result_type = build_pointer_type (result_type);
1546 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1547 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1548 addr_p = true;
1549 }
1550
1551 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1552 true_operand, false_operand);
1553
1554 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1555 in both arms, make sure it gets evaluated by moving it ahead of the
1556 conditional expression. This is necessary because it is evaluated
1557 in only one place at run time and would otherwise be uninitialized
1558 in one of the arms. */
1559 true_operand = skip_simple_arithmetic (true_operand);
1560 false_operand = skip_simple_arithmetic (false_operand);
1561
1562 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1563 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1564
1565 if (addr_p)
1566 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1567
1568 return result;
1569 }
1570
1571 /* Similar, but for COMPOUND_EXPR. */
1572
1573 tree
1574 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1575 {
1576 bool addr_p = false;
1577 tree result;
1578
1579 /* If the result type is unconstrained, take the address of the operand and
1580 then dereference the result. Likewise if the result type is passed by
1581 reference, but this is natively handled in the gimplifier. */
1582 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1583 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1584 {
1585 result_type = build_pointer_type (result_type);
1586 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1587 addr_p = true;
1588 }
1589
1590 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1591 expr_operand);
1592
1593 if (addr_p)
1594 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1595
1596 return result;
1597 }
1598 \f
1599 /* Conveniently construct a function call expression. FNDECL names the
1600 function to be called, N is the number of arguments, and the "..."
1601 parameters are the argument expressions. Unlike build_call_expr
1602 this doesn't fold the call, hence it will always return a CALL_EXPR. */
1603
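/* Usage sketch, with a hypothetical FNDECL and arguments:

     tree call = build_call_n_expr (some_fndecl, 2, arg1, arg2);

   builds a CALL_EXPR invoking SOME_FNDECL on ARG1 and ARG2, with no
   folding applied to the result.  */
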
1604 tree
1605 build_call_n_expr (tree fndecl, int n, ...)
1606 {
1607 va_list ap;
1608 tree fntype = TREE_TYPE (fndecl);
1609 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1610
1611 va_start (ap, n);
1612 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1613 va_end (ap);
1614 return fn;
1615 }
1616 \f
1617 /* Call a function that raises an exception and pass the line number and file
1618 name, if requested. MSG says which exception function to call.
1619
1620 GNAT_NODE is the gnat node conveying the source location for which the
1621 error should be signaled, or Empty in which case the error is signaled on
1622 the current ref_filename/input_line.
1623
1624 KIND says which kind of exception this is for
1625 (N_Raise_{Constraint,Storage,Program}_Error). */
1626
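/* Assumed shape of the generated call in the non-goto case, for
   illustration only:

     raise_routine ("file.adb", 42);

   where RAISE_ROUTINE stands for gnat_raise_decls[MSG] and the arguments
   are the file name string and the logical line number.  */
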
1627 tree
1628 build_call_raise (int msg, Node_Id gnat_node, char kind)
1629 {
1630 tree fndecl = gnat_raise_decls[msg];
1631 tree label = get_exception_label (kind);
1632 tree filename;
1633 int line_number;
1634 const char *str;
1635 int len;
1636
1637 /* If this is to be done as a goto, handle that case. */
1638 if (label)
1639 {
1640 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1641 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1642
1643 /* If Local_Raise is present, generate
1644 Local_Raise (exception'Identity); */
1645 if (Present (local_raise))
1646 {
1647 tree gnu_local_raise
1648 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1649 tree gnu_exception_entity
1650 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1651 tree gnu_call
1652 = build_call_n_expr (gnu_local_raise, 1,
1653 build_unary_op (ADDR_EXPR, NULL_TREE,
1654 gnu_exception_entity));
1655
1656 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1657 gnu_call, gnu_result);
}
1658
1659 return gnu_result;
1660 }
1661
1662 str
1663 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1664 ? ""
1665 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1666 ? IDENTIFIER_POINTER
1667 (get_identifier (Get_Name_String
1668 (Debug_Source_Name
1669 (Get_Source_File_Index (Sloc (gnat_node))))))
1670 : ref_filename;
1671
1672 len = strlen (str);
1673 filename = build_string (len, str);
1674 line_number
1675 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1676 ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;
1677
1678 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1679 build_index_type (size_int (len)));
1680
1681 return
1682 build_call_n_expr (fndecl, 2,
1683 build1 (ADDR_EXPR,
1684 build_pointer_type (unsigned_char_type_node),
1685 filename),
1686 build_int_cst (NULL_TREE, line_number));
1687 }
1688
1689 /* Similar to build_call_raise, for an index or range check exception as
1690 determined by MSG, with extra information of the form
1691 "INDEX out of range FIRST..LAST". */
1692
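/* Assumed shape of the generated call, for illustration only:

     raise_routine ("file.adb", line, column, index, first, last);

   where RAISE_ROUTINE stands for gnat_raise_decls_ext[MSG]; the last
   three arguments let the runtime report "INDEX out of range
   FIRST..LAST".  */
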
1693 tree
1694 build_call_raise_range (int msg, Node_Id gnat_node,
1695 tree index, tree first, tree last)
1696 {
1697 tree fndecl = gnat_raise_decls_ext[msg];
1698 tree filename;
1699 int line_number, column_number;
1700 const char *str;
1701 int len;
1702
1703 str
1704 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1705 ? ""
1706 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1707 ? IDENTIFIER_POINTER
1708 (get_identifier (Get_Name_String
1709 (Debug_Source_Name
1710 (Get_Source_File_Index (Sloc (gnat_node))))))
1711 : ref_filename;
1712
1713 len = strlen (str);
1714 filename = build_string (len, str);
1715 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1716 {
1717 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1718 column_number = Get_Column_Number (Sloc (gnat_node));
1719 }
1720 else
1721 {
1722 line_number = input_line;
1723 column_number = 0;
1724 }
1725
1726 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1727 build_index_type (size_int (len)));
1728
1729 return
1730 build_call_n_expr (fndecl, 6,
1731 build1 (ADDR_EXPR,
1732 build_pointer_type (unsigned_char_type_node),
1733 filename),
1734 build_int_cst (NULL_TREE, line_number),
1735 build_int_cst (NULL_TREE, column_number),
1736 convert (integer_type_node, index),
1737 convert (integer_type_node, first),
1738 convert (integer_type_node, last));
1739 }
1740
1741 /* Similar to build_call_raise, with extra information about the column
1742 where the check failed. */
1743
1744 tree
1745 build_call_raise_column (int msg, Node_Id gnat_node)
1746 {
1747 tree fndecl = gnat_raise_decls_ext[msg];
1748 tree filename;
1749 int line_number, column_number;
1750 const char *str;
1751 int len;
1752
1753 str
1754 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1755 ? ""
1756 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1757 ? IDENTIFIER_POINTER
1758 (get_identifier (Get_Name_String
1759 (Debug_Source_Name
1760 (Get_Source_File_Index (Sloc (gnat_node))))))
1761 : ref_filename;
1762
1763 len = strlen (str);
1764 filename = build_string (len, str);
1765 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1766 {
1767 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1768 column_number = Get_Column_Number (Sloc (gnat_node));
1769 }
1770 else
1771 {
1772 line_number = input_line;
1773 column_number = 0;
1774 }
1775
1776 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1777 build_index_type (size_int (len)));
1778
1779 return
1780 build_call_n_expr (fndecl, 3,
1781 build1 (ADDR_EXPR,
1782 build_pointer_type (unsigned_char_type_node),
1783 filename),
1784 build_int_cst (NULL_TREE, line_number),
1785 build_int_cst (NULL_TREE, column_number));
1786 }
1787 \f
1788 /* qsort comparer for the bit positions of two constructor elements
1789 for record components. */
1790
1791 static int
1792 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1793 {
1794 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1795 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1796 const_tree const field1 = elmt1->index;
1797 const_tree const field2 = elmt2->index;
1798 const int ret
1799 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1800
1801 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1802 }
1803
1804 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1805
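/* Why the sorting below matters -- an illustrative example, not from the
   original source: with a representation clause such as

     for R use record
       B at 4 range 0 .. 31;
       A at 0 range 0 .. 31;
     end record;

   an aggregate may list B before A, whereas emitting the constructor as
   static data requires the elements in increasing bit position.  */
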
1806 tree
1807 gnat_build_constructor (tree type, VEC(constructor_elt,gc) *v)
1808 {
1809 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1810 bool side_effects = false;
1811 tree result, obj, val;
1812 unsigned int n_elmts;
1813
1814 /* Scan the elements to see if they are all constant or if any has side
1815 effects, to let us set global flags on the resulting constructor. Count
1816 the elements along the way for possible sorting purposes below. */
1817 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1818 {
1819 /* The predicate must be in keeping with output_constructor. */
1820 if (!TREE_CONSTANT (val)
1821 || (TREE_CODE (type) == RECORD_TYPE
1822 && CONSTRUCTOR_BITFIELD_P (obj)
1823 && !initializer_constant_valid_for_bitfield_p (val))
1824 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1825 allconstant = false;
1826
1827 if (TREE_SIDE_EFFECTS (val))
1828 side_effects = true;
1829 }
1830
1831 /* For record types with constant components only, sort the field list
1832 by increasing bit position. This is necessary to ensure the
1833 constructor can be output as static data. */
1834 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1835 VEC_qsort (constructor_elt, v, compare_elmt_bitpos);
1836
1837 result = build_constructor (type, v);
1838 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1839 TREE_SIDE_EFFECTS (result) = side_effects;
1840 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1841 return result;
1842 }
1843 \f
1844 /* Return a COMPONENT_REF to access the field given either by COMPONENT,
1845 an IDENTIFIER_NODE naming the field, or by FIELD, a FIELD_DECL for the
1846 field itself. Don't fold the result if NO_FOLD_P is true.
1847
1848 We also handle the fact that we might have been passed a pointer to the
1849 actual record and know how to look for fields in variant parts. */
1850
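/* An illustrative example, assumed rather than taken from the original
   source: for a tagged type extension, an inherited component C is found
   via the internal _Parent field, conceptually

     obj._Parent.C

   which is what the DECL_INTERNAL_P search below reconstructs.  */
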
1851 static tree
1852 build_simple_component_ref (tree record_variable, tree component,
1853 tree field, bool no_fold_p)
1854 {
1855 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1856 tree ref, inner_variable;
1857
1858 gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
1859 && COMPLETE_TYPE_P (record_type)
1860 && (component == NULL_TREE) != (field == NULL_TREE));
1861
1862 /* If no field was specified, look for a field with the specified name in
1863 the current record only. */
1864 if (!field)
1865 for (field = TYPE_FIELDS (record_type);
1866 field;
1867 field = DECL_CHAIN (field))
1868 if (DECL_NAME (field) == component)
1869 break;
1870
1871 if (!field)
1872 return NULL_TREE;
1873
1874 /* If this field is not in the specified record, see if we can find a field
1875 in the specified record whose original field is the same as this one. */
1876 if (DECL_CONTEXT (field) != record_type)
1877 {
1878 tree new_field;
1879
1880 /* First, loop through the normal components. */
1881 for (new_field = TYPE_FIELDS (record_type);
1882 new_field;
1883 new_field = DECL_CHAIN (new_field))
1884 if (SAME_FIELD_P (field, new_field))
1885 break;
1886
1887 /* Next, see if we're looking for an inherited component in an extension.
1888 If so, look through the extension directly. */
1889 if (!new_field
1890 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1891 && TYPE_ALIGN_OK (record_type)
1892 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1893 == RECORD_TYPE
1894 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1895 {
1896 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1897 NULL_TREE, field, no_fold_p);
1898 if (ref)
1899 return ref;
1900 }
1901
1902 /* Next, loop through the DECL_INTERNAL_P components if we haven't found the
1903 component in the first search. Doing this search in two steps is
1904 required to avoid hidden homonymous fields in the _Parent field. */
1905 if (!new_field)
1906 for (new_field = TYPE_FIELDS (record_type);
1907 new_field;
1908 new_field = DECL_CHAIN (new_field))
1909 if (DECL_INTERNAL_P (new_field))
1910 {
1911 tree field_ref
1912 = build_simple_component_ref (record_variable,
1913 NULL_TREE, new_field, no_fold_p);
1914 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1915 no_fold_p);
1916 if (ref)
1917 return ref;
1918 }
1919
1920 field = new_field;
1921 }
1922
1923 if (!field)
1924 return NULL_TREE;
1925
1926 /* If the field's offset has overflowed, do not try to access it, as doing
1927 so may trigger sanity checks deeper in the back-end. Note that we don't
1928 need to warn since this will be done on trying to declare the object. */
1929 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1930 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1931 return NULL_TREE;
1932
1933 /* Look through a conversion between type variants. This is transparent as
1934 far as the field is concerned. */
1935 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1936 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1937 == record_type)
1938 inner_variable = TREE_OPERAND (record_variable, 0);
1939 else
1940 inner_variable = record_variable;
1941
1942 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
1943 NULL_TREE);
1944
1945 if (TREE_READONLY (record_variable)
1946 || TREE_READONLY (field)
1947 || TYPE_READONLY (record_type))
1948 TREE_READONLY (ref) = 1;
1949
1950 if (TREE_THIS_VOLATILE (record_variable)
1951 || TREE_THIS_VOLATILE (field)
1952 || TYPE_VOLATILE (record_type))
1953 TREE_THIS_VOLATILE (ref) = 1;
1954
1955 if (no_fold_p)
1956 return ref;
1957
1958 /* The generic folder may punt in this case because the inner array type
1959 can be self-referential, but folding is in fact not problematic. */
1960 if (TREE_CODE (record_variable) == CONSTRUCTOR
1961 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
1962 {
1963 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
1964 unsigned HOST_WIDE_INT idx;
1965 tree index, value;
1966 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
1967 if (index == field)
1968 return value;
1969 return ref;
1970 }
1971
1972 return fold (ref);
1973 }
1974 \f
1975 /* Like build_simple_component_ref, except that we give an error if the
1976 reference could not be found. */
1977
1978 tree
1979 build_component_ref (tree record_variable, tree component,
1980 tree field, bool no_fold_p)
1981 {
1982 tree ref = build_simple_component_ref (record_variable, component, field,
1983 no_fold_p);
1984
1985 if (ref)
1986 return ref;
1987
1988 /* If FIELD was specified, assume this is an invalid user field so raise
1989 Constraint_Error. Otherwise, we have no type to return so abort. */
1990 gcc_assert (field);
1991 return build1 (NULL_EXPR, TREE_TYPE (field),
1992 build_call_raise (CE_Discriminant_Check_Failed, Empty,
1993 N_Raise_Constraint_Error));
1994 }
1995 \f
1996 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
1997 identically. Process the case where a GNAT_PROC to call is provided. */
1998
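/* Assumed shapes of the generated calls, for illustration only:

     proc (pool'address, size, align)        --  allocator, tagged pool
     proc (pool'address, obj, size, align)   --  deallocator, tagged pool
     proc (size)                             --  secondary-stack allocator
     proc (obj, size)                        --  secondary-stack release

   matching the argument lists built below.  */
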
1999 static inline tree
2000 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2001 Entity_Id gnat_proc, Entity_Id gnat_pool)
2002 {
2003 tree gnu_proc = gnat_to_gnu (gnat_proc);
2004 tree gnu_call;
2005
2006 /* The storage pools are obviously always tagged types, but the
2007 secondary stack uses the same mechanism and is not tagged. */
2008 if (Is_Tagged_Type (Etype (gnat_pool)))
2009 {
2010 /* The size is the third parameter; the alignment parameter
2011 has the same type. */
2012 Entity_Id gnat_size_type
2013 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2014 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2015
2016 tree gnu_pool = gnat_to_gnu (gnat_pool);
2017 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2018 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2019
2020 gnu_size = convert (gnu_size_type, gnu_size);
2021 gnu_align = convert (gnu_size_type, gnu_align);
2022
2023 /* The first arg is always the address of the storage pool; next
2024 comes the address of the object, for a deallocator, then the
2025 size and alignment. */
2026 if (gnu_obj)
2027 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2028 gnu_size, gnu_align);
2029 else
2030 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2031 gnu_size, gnu_align);
2032 }
2033
2034 /* Secondary stack case. */
2035 else
2036 {
2037 /* The size is the second parameter. */
2038 Entity_Id gnat_size_type
2039 = Etype (Next_Formal (First_Formal (gnat_proc)));
2040 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2041
2042 gnu_size = convert (gnu_size_type, gnu_size);
2043
2044 /* For a deallocator, the first arg is the address of the object,
2045 then comes the size; an allocator takes only the size. */
2046 if (gnu_obj)
2047 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2048 else
2049 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2050 }
2051
2052 return gnu_call;
2053 }
2054
2055 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2056 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2057 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2058 latter offers. */
2059
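/* Layout sketch for the super-aligned case, illustrative only:

     malloc result -> [ padding | saved malloc pointer | data ... ]
                                                          ^
                               returned address, aligned for DATA_TYPE

   The pointer saved just in front of the data is what maybe_wrap_free
   retrieves later in order to release the whole block.  */
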
2060 static inline tree
2061 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2062 {
2063 /* When the DATA_TYPE alignment is stricter than what malloc offers
2064 (super-aligned case), we allocate an "aligning" wrapper type and return
2065 the address of its single data field with the malloc's return value
2066 stored just in front. */
2067
2068 unsigned int data_align = TYPE_ALIGN (data_type);
2069 unsigned int system_allocator_alignment
2070 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2071
2072 tree aligning_type
2073 = ((data_align > system_allocator_alignment)
2074 ? make_aligning_type (data_type, data_align, data_size,
2075 system_allocator_alignment,
2076 POINTER_SIZE / BITS_PER_UNIT)
2077 : NULL_TREE);
2078
2079 tree size_to_malloc
2080 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2081
2082 tree malloc_ptr;
2083
2084 /* On VMS, if pointers are 64-bit and the allocator size is 32-bit or
2085 Convention C, allocate 32-bit memory. */
2086 if (TARGET_ABI_OPEN_VMS
2087 && POINTER_SIZE == 64
2088 && Nkind (gnat_node) == N_Allocator
2089 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
2090 || Convention (Etype (gnat_node)) == Convention_C))
2091 malloc_ptr = build_call_n_expr (malloc32_decl, 1, size_to_malloc);
2092 else
2093 malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2094
2095 if (aligning_type)
2096 {
2097 /* Latch malloc's return value and get a pointer to the aligning field
2098 first. */
2099 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2100
2101 tree aligning_record_addr
2102 = convert (build_pointer_type (aligning_type), storage_ptr);
2103
2104 tree aligning_record
2105 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2106
2107 tree aligning_field
2108 = build_component_ref (aligning_record, NULL_TREE,
2109 TYPE_FIELDS (aligning_type), false);
2110
2111 tree aligning_field_addr
2112 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2113
2114 /* Then arrange to store the allocator's return value ahead
2115 and return. */
2116 tree storage_ptr_slot_addr
2117 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
2118 convert (ptr_void_type_node, aligning_field_addr),
2119 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2120 / BITS_PER_UNIT));
2121
2122 tree storage_ptr_slot
2123 = build_unary_op (INDIRECT_REF, NULL_TREE,
2124 convert (build_pointer_type (ptr_void_type_node),
2125 storage_ptr_slot_addr));
2126
2127 return
2128 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2129 build_binary_op (MODIFY_EXPR, NULL_TREE,
2130 storage_ptr_slot, storage_ptr),
2131 aligning_field_addr);
2132 }
2133 else
2134 return malloc_ptr;
2135 }
2136
2137 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2138 designated by DATA_PTR using the __gnat_free entry point. */
2139
2140 static inline tree
2141 maybe_wrap_free (tree data_ptr, tree data_type)
2142 {
2143 /* In the regular alignment case, we pass the data pointer straight to free.
2144 In the superaligned case, we need to retrieve the initial allocator
2145 return value, stored in front of the data block at allocation time. */
2146
2147 unsigned int data_align = TYPE_ALIGN (data_type);
2148 unsigned int system_allocator_alignment
2149 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2150
2151 tree free_ptr;
2152
2153 if (data_align > system_allocator_alignment)
2154 {
2155 /* DATA_FRONT_PTR (void *)
2156 = (void *)DATA_PTR - sizeof (void *) */
2157 tree data_front_ptr
2158 = build_binary_op
2159 (POINTER_PLUS_EXPR, ptr_void_type_node,
2160 convert (ptr_void_type_node, data_ptr),
2161 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2162
2163 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2164 free_ptr
2165 = build_unary_op
2166 (INDIRECT_REF, NULL_TREE,
2167 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2168 }
2169 else
2170 free_ptr = data_ptr;
2171
2172 return build_call_n_expr (free_decl, 1, free_ptr);
2173 }
2174
2175 /* Build a GCC tree to call an allocation or deallocation function.
2176 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2177 generate an allocator.
2178
2179 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2180 object type, used to determine the to-be-honored address alignment.
2181 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2182 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2183 to provide an error location for restriction violation messages. */
2184
2185 tree
2186 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2187 Entity_Id gnat_proc, Entity_Id gnat_pool,
2188 Node_Id gnat_node)
2189 {
2190 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2191
2192 /* Explicit proc to call? It is assumed to deal with the type's
2193 alignment constraints. */
2194 if (Present (gnat_proc))
2195 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2196 gnat_proc, gnat_pool);
2197
2198 /* Otherwise, object to "free" or "malloc" with possible special processing
2199 for alignments stricter than what the default allocator honors. */
2200 else if (gnu_obj)
2201 return maybe_wrap_free (gnu_obj, gnu_type);
2202 else
2203 {
2204 /* Assert that we can no longer be called with this special pool. */
2205 gcc_assert (gnat_pool != -1);
2206
2207 /* Check that we aren't violating the associated restriction. */
2208 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2209 Check_No_Implicit_Heap_Alloc (gnat_node);
2210
2211 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2212 }
2213 }
2214 \f
2215 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2216 initial value is INIT, if INIT is nonzero. Convert the expression to
2217 RESULT_TYPE, which must be some pointer type, and return the result.
2218
2219 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2220 the storage pool to use. GNAT_NODE is used to provide an error
2221 location for restriction violation messages. If IGNORE_INIT_TYPE is
2222 true, ignore the type of INIT for the purpose of determining the size;
2223 this will cause the maximum size to be allocated if TYPE is of
2224 self-referential size. */
2225
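/* An illustrative sketch, not from the original source: for an
   unconstrained array allocator such as

     new String'("foo")

   RESULT_TYPE is a fat pointer, so a single block holding the bounds
   template followed by the data is allocated and initialized, and the
   result is built from its address.  */
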
2226 tree
2227 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2228 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2229 {
2230 tree size, storage, storage_deref, storage_init;
2231
2232 /* If there is an initializer and it is a NULL_EXPR, just return a new one. */
2233 if (init && TREE_CODE (init) == NULL_EXPR)
2234 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2235
2236 /* If there is an initializer and it is a COND_EXPR, deal with each branch. */
2237 else if (init && TREE_CODE (init) == COND_EXPR)
2238 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2239 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2240 gnat_proc, gnat_pool, gnat_node,
2241 ignore_init_type),
2242 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2243 gnat_proc, gnat_pool, gnat_node,
2244 ignore_init_type));
2245
2246 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2247 sizes of the object and its template. Allocate the whole thing and
2248 fill in the parts that are known. */
2249 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2250 {
2251 tree storage_type
2252 = build_unc_object_type_from_ptr (result_type, type,
2253 get_identifier ("ALLOC"), false);
2254 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2255 tree storage_ptr_type = build_pointer_type (storage_type);
2256
2257 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2258 init);
2259
2260 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2261 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2262 size = ssize_int (-1);
2263
2264 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2265 gnat_proc, gnat_pool, gnat_node);
2266 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2267 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2268 TREE_THIS_NOTRAP (storage_deref) = 1;
2269
2270 /* If there is an initializing expression, then make a constructor for
2271 the entire object including the bounds and copy it into the object.
2272 If there is no initializing expression, just set the bounds. */
2273 if (init)
2274 {
2275 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
2276
2277 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2278 build_template (template_type, type, init));
2279 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2280 init);
2281 storage_init
2282 = build_binary_op (MODIFY_EXPR, NULL_TREE, storage_deref,
2283 gnat_build_constructor (storage_type, v));
2284 }
2285 else
2286 storage_init
2287 = build_binary_op (MODIFY_EXPR, NULL_TREE,
2288 build_component_ref (storage_deref, NULL_TREE,
2289 TYPE_FIELDS (storage_type),
2290 false),
2291 build_template (template_type, type, NULL_TREE));
2292
2293 return build2 (COMPOUND_EXPR, result_type,
2294 storage_init, convert (result_type, storage));
2295 }
2296
2297 size = TYPE_SIZE_UNIT (type);
2298
2299 /* If we have an initializing expression, see if its size is simpler
2300 than the size from the type. */
2301 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2302 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2303 || CONTAINS_PLACEHOLDER_P (size)))
2304 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2305
2306 /* If the size is still self-referential, reference the initializing
2307 expression, if it is present. If not, this must have been a
2308 call to allocate a library-level object, in which case we use
2309 the maximum size. */
2310 if (CONTAINS_PLACEHOLDER_P (size))
2311 {
2312 if (!ignore_init_type && init)
2313 size = substitute_placeholder_in_expr (size, init);
2314 else
2315 size = max_size (size, true);
2316 }
2317
2318 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2319 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2320 size = ssize_int (-1);
2321
2322 storage = convert (result_type,
2323 build_call_alloc_dealloc (NULL_TREE, size, type,
2324 gnat_proc, gnat_pool,
2325 gnat_node));
2326
2327 /* If we have an initial value, protect the new address, assign the value
2328 and return the address with a COMPOUND_EXPR. */
2329 if (init)
2330 {
2331 storage = gnat_protect_expr (storage);
2332 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2333 TREE_THIS_NOTRAP (storage_deref) = 1;
2334 storage_init
2335 = build_binary_op (MODIFY_EXPR, NULL_TREE, storage_deref, init);
2336 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2337 }
2338
2339 return storage;
2340 }
2341 \f
2342 /* Indicate that we need to take the address of T and that it therefore
2343 should not be allocated in a register. Returns true if successful. */
2344
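/* An illustrative example, assumed: for an Ada attribute reference such
   as X'Address or X'Access, gigi calls this on the GCC tree for X so
   that X is assigned a memory location instead of living in a
   register.  */
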
2345 bool
2346 gnat_mark_addressable (tree t)
2347 {
2348 while (true)
2349 switch (TREE_CODE (t))
2350 {
2351 case ADDR_EXPR:
2352 case COMPONENT_REF:
2353 case ARRAY_REF:
2354 case ARRAY_RANGE_REF:
2355 case REALPART_EXPR:
2356 case IMAGPART_EXPR:
2357 case VIEW_CONVERT_EXPR:
2358 case NON_LVALUE_EXPR:
2359 CASE_CONVERT:
2360 t = TREE_OPERAND (t, 0);
2361 break;
2362
2363 case COMPOUND_EXPR:
2364 t = TREE_OPERAND (t, 1);
2365 break;
2366
2367 case CONSTRUCTOR:
2368 TREE_ADDRESSABLE (t) = 1;
2369 return true;
2370
2371 case VAR_DECL:
2372 case PARM_DECL:
2373 case RESULT_DECL:
2374 TREE_ADDRESSABLE (t) = 1;
2375 return true;
2376
2377 case FUNCTION_DECL:
2378 TREE_ADDRESSABLE (t) = 1;
2379 return true;
2380
2381 case CONST_DECL:
2382 return DECL_CONST_CORRESPONDING_VAR (t)
2383 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2384
2385 default:
2386 return true;
2387 }
2388 }
2389 \f
2390 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2391 but we know how to handle our own nodes. */
2392
2393 tree
2394 gnat_save_expr (tree exp)
2395 {
2396 tree type = TREE_TYPE (exp);
2397 enum tree_code code = TREE_CODE (exp);
2398
2399 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2400 return exp;
2401
2402 if (code == UNCONSTRAINED_ARRAY_REF)
2403 {
2404 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2405 TREE_READONLY (t) = TYPE_READONLY (type);
2406 return t;
2407 }
2408
2409 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2410 This may be more efficient, but will also allow us to more easily find
2411 the match for the PLACEHOLDER_EXPR. */
2412 if (code == COMPONENT_REF
2413 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2414 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2415 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2416
2417 return save_expr (exp);
2418 }
2419
2420 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2421 is optimized under the assumption that EXP's value doesn't change before
2422 its subsequent reuse(s) except through its potential reevaluation. */
2423
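/* Usage sketch, with hypothetical variables:

     tree e = gnat_protect_expr (expr);
     tree lo_ok = build_binary_op (GE_EXPR, boolean_type_node, e, lo);
     tree hi_ok = build_binary_op (LE_EXPR, boolean_type_node, e, hi);

   so that EXPR is evaluated only once even though E is referenced by
   both bounds of a range check.  */
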
2424 tree
2425 gnat_protect_expr (tree exp)
2426 {
2427 tree type = TREE_TYPE (exp);
2428 enum tree_code code = TREE_CODE (exp);
2429
2430 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2431 return exp;
2432
2433 /* If EXP has no side effects, we theoretically don't need to do anything.
2434 However, we may be recursively passed more and more complex expressions
2435 involving checks which will be reused multiple times and eventually be
2436 unshared for gimplification; in order to avoid a complexity explosion
2437 at that point, we protect any expressions more complex than a simple
2438 arithmetic expression. */
2439 if (!TREE_SIDE_EFFECTS (exp))
2440 {
2441 tree inner = skip_simple_arithmetic (exp);
2442 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2443 return exp;
2444 }
2445
2446 /* If this is a conversion, protect what's inside the conversion. */
2447 if (code == NON_LVALUE_EXPR
2448 || CONVERT_EXPR_CODE_P (code)
2449 || code == VIEW_CONVERT_EXPR)
2450 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2451
2452 /* If we're indirectly referencing something, we only need to protect the
2453 address since the data itself can't change in these situations. */
2454 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2455 {
2456 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2457 TREE_READONLY (t) = TYPE_READONLY (type);
2458 return t;
2459 }
2460
2461 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2462 This may be more efficient, but will also allow us to more easily find
2463 the match for the PLACEHOLDER_EXPR. */
2464 if (code == COMPONENT_REF
2465 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2466 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2467 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2468
2469 /* If this is a fat pointer or something that can be placed in a register,
2470 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2471 returned via invisible reference in most ABIs so the temporary will
2472 directly be filled by the callee. */
2473 if (TYPE_IS_FAT_POINTER_P (type)
2474 || TYPE_MODE (type) != BLKmode
2475 || code == CALL_EXPR)
2476 return save_expr (exp);
2477
2478 /* Otherwise, take a reference to the object: protect the address and dereference it. */
2479 return
2480 build_unary_op (INDIRECT_REF, type,
2481 save_expr (build_unary_op (ADDR_EXPR,
2482 build_reference_type (type),
2483 exp)));
2484 }
2485
2486 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2487 argument to force evaluation of everything. */
2488
2489 static tree
2490 gnat_stabilize_reference_1 (tree e, bool force)
2491 {
2492 enum tree_code code = TREE_CODE (e);
2493 tree type = TREE_TYPE (e);
2494 tree result;
2495
2496 /* We cannot ignore const expressions because one might be a reference
2497 to a const array whose index contains side effects. But we can
2498 ignore things that are actually constant or that have already been
2499 handled by this function. */
2500 if (TREE_CONSTANT (e) || code == SAVE_EXPR)
2501 return e;
2502
2503 switch (TREE_CODE_CLASS (code))
2504 {
2505 case tcc_exceptional:
2506 case tcc_declaration:
2507 case tcc_comparison:
2508 case tcc_expression:
2509 case tcc_reference:
2510 case tcc_vl_exp:
2511 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2512 fat pointer. This may be more efficient, but will also allow
2513 us to more easily find the match for the PLACEHOLDER_EXPR. */
2514 if (code == COMPONENT_REF
2515 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2516 result
2517 = build3 (code, type,
2518 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2519 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2520 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2521 so that it will only be evaluated once. */
2522 /* The tcc_reference and tcc_comparison classes could be handled as
2523 below, but it is generally faster to only evaluate them once. */
2524 else if (TREE_SIDE_EFFECTS (e) || force)
2525 return save_expr (e);
2526 else
2527 return e;
2528 break;
2529
2530 case tcc_binary:
2531 /* Recursively stabilize each operand. */
2532 result
2533 = build2 (code, type,
2534 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2535 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
2536 break;
2537
2538 case tcc_unary:
2539 /* Recursively stabilize each operand. */
2540 result
2541 = build1 (code, type,
2542 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
2543 break;
2544
2545 default:
2546 gcc_unreachable ();
2547 }
2548
2549 /* See similar handling in gnat_stabilize_reference. */
2550 TREE_READONLY (result) = TREE_READONLY (e);
2551 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2552 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2553
2554 if (code == INDIRECT_REF
2555 || code == UNCONSTRAINED_ARRAY_REF
2556 || code == ARRAY_REF
2557 || code == ARRAY_RANGE_REF)
2558 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
2559
2560 return result;
2561 }
2562
2563 /* This is equivalent to stabilize_reference in tree.c but we know how to
2564 handle our own nodes and we take extra arguments. FORCE says whether to
2565 force evaluation of everything. We set SUCCESS to true unless we walk
2566 through something we don't know how to stabilize. */
2567
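/* An illustrative example, assumed: for an lvalue such as A(F(I)).C used
   on both sides of an assignment, the subscript F(I) is wrapped in a
   SAVE_EXPR so it is computed only once, while the reference structure
   (ARRAY_REF, COMPONENT_REF) is rebuilt around it.  */
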
2568 tree
2569 gnat_stabilize_reference (tree ref, bool force, bool *success)
2570 {
2571 tree type = TREE_TYPE (ref);
2572 enum tree_code code = TREE_CODE (ref);
2573 tree result;
2574
2575 /* Assume we'll succeed unless proven otherwise. */
2576 if (success)
2577 *success = true;
2578
2579 switch (code)
2580 {
2581 case CONST_DECL:
2582 case VAR_DECL:
2583 case PARM_DECL:
2584 case RESULT_DECL:
2585 /* No action is needed in this case. */
2586 return ref;
2587
2588 case ADDR_EXPR:
2589 CASE_CONVERT:
2590 case FLOAT_EXPR:
2591 case FIX_TRUNC_EXPR:
2592 case VIEW_CONVERT_EXPR:
2593 result
2594 = build1 (code, type,
2595 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2596 success));
2597 break;
2598
2599 case INDIRECT_REF:
2600 case UNCONSTRAINED_ARRAY_REF:
2601 result = build1 (code, type,
2602 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
2603 force));
2604 break;
2605
2606 case COMPONENT_REF:
2607 result = build3 (COMPONENT_REF, type,
2608 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2609 success),
2610 TREE_OPERAND (ref, 1), NULL_TREE);
2611 break;
2612
2613 case BIT_FIELD_REF:
2614 result = build3 (BIT_FIELD_REF, type,
2615 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2616 success),
2617 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2618 force),
2619 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
2620 force));
2621 break;
2622
2623 case ARRAY_REF:
2624 case ARRAY_RANGE_REF:
2625 result = build4 (code, type,
2626 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2627 success),
2628 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2629 force),
2630 NULL_TREE, NULL_TREE);
2631 break;
2632
2633 case CALL_EXPR:
2634 result = gnat_stabilize_reference_1 (ref, force);
2635 break;
2636
2637 case COMPOUND_EXPR:
2638 result = build2 (COMPOUND_EXPR, type,
2639 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2640 success),
2641 gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
2642 success));
2643 break;
2644
2645 case CONSTRUCTOR:
2646 /* Constructors with 1 element are used extensively to formally
2647 convert objects to special wrapping types. */
2648 if (TREE_CODE (type) == RECORD_TYPE
2649 && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
2650 {
2651 tree index
2652 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
2653 tree value
2654 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
2655 result
2656 = build_constructor_single (type, index,
2657 gnat_stabilize_reference_1 (value,
2658 force));
2659 }
2660 else
2661 {
2662 if (success)
2663 *success = false;
2664 return ref;
2665 }
2666 break;
2667
2668 case ERROR_MARK:
2669 ref = error_mark_node;
2670
2671 /* ... fall through to failure ... */
2672
2673 /* If arg isn't a kind of lvalue we recognize, make no change.
2674 Caller should recognize the error for an invalid lvalue. */
2675 default:
2676 if (success)
2677 *success = false;
2678 return ref;
2679 }
2680
2681 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2682 may not be sustained across some paths, such as the way via build1 for
2683 INDIRECT_REF. We reset those flags here in the general case, which is
2684 consistent with the GCC version of this routine.
2685
2686 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2687 paths introduce side-effects where there was none initially (e.g. if a
2688 SAVE_EXPR is built) and we also want to keep track of that. */
2689 TREE_READONLY (result) = TREE_READONLY (ref);
2690 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2691 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2692
2693 if (code == INDIRECT_REF
2694 || code == UNCONSTRAINED_ARRAY_REF
2695 || code == ARRAY_REF
2696 || code == ARRAY_RANGE_REF)
2697 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2698
2699 return result;
2700 }
2701
2702 /* If EXPR is an expression that is invariant in the current function, in the
2703 sense that it can be evaluated anywhere in the function and any number of
2704 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2705
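/* An illustrative example, assumed: a read-only local constant with a
   constant initializer, or an expression like A'Last built from such
   objects, is returned as invariant and may be hoisted out of a loop by
   the caller; a dereference qualifies only if it is read-only, free of
   side effects and known not to trap.  */
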
2706 tree
2707 gnat_invariant_expr (tree expr)
2708 {
2709 tree type = TREE_TYPE (expr), t;
2710
2711 expr = remove_conversions (expr, false);
2712
2713 while ((TREE_CODE (expr) == CONST_DECL
2714 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2715 && decl_function_context (expr) == current_function_decl
2716 && DECL_INITIAL (expr))
2717 expr = remove_conversions (DECL_INITIAL (expr), false);
2718
2719 if (TREE_CONSTANT (expr))
2720 return fold_convert (type, expr);
2721
2722 t = expr;
2723
2724 while (true)
2725 {
2726 switch (TREE_CODE (t))
2727 {
2728 case COMPONENT_REF:
2729 if (TREE_OPERAND (t, 2) != NULL_TREE)
2730 return NULL_TREE;
2731 break;
2732
2733 case ARRAY_REF:
2734 case ARRAY_RANGE_REF:
2735 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2736 || TREE_OPERAND (t, 2) != NULL_TREE
2737 || TREE_OPERAND (t, 3) != NULL_TREE)
2738 return NULL_TREE;
2739 break;
2740
2741 case BIT_FIELD_REF:
2742 case VIEW_CONVERT_EXPR:
2743 case REALPART_EXPR:
2744 case IMAGPART_EXPR:
2745 break;
2746
2747 case INDIRECT_REF:
2748 if (!TREE_READONLY (t)
2749 || TREE_SIDE_EFFECTS (t)
2750 || !TREE_THIS_NOTRAP (t))
2751 return NULL_TREE;
2752 break;
2753
2754 default:
2755 goto object;
2756 }
2757
2758 t = TREE_OPERAND (t, 0);
2759 }
2760
2761 object:
2762 if (TREE_SIDE_EFFECTS (t))
2763 return NULL_TREE;
2764
2765 if (TREE_CODE (t) == CONST_DECL
2766 && (DECL_EXTERNAL (t)
2767 || decl_function_context (t) != current_function_decl))
2768 return fold_convert (type, expr);
2769
2770 if (!TREE_READONLY (t))
2771 return NULL_TREE;
2772
2773 if (TREE_CODE (t) == PARM_DECL)
2774 return fold_convert (type, expr);
2775
2776 if (TREE_CODE (t) == VAR_DECL
2777 && (DECL_EXTERNAL (t)
2778 || decl_function_context (t) != current_function_decl))
2779 return fold_convert (type, expr);
2780
2781 return NULL_TREE;
2782 }