/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                              U T I L S 2                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2010, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "flags.h"
#include "output.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

static tree find_common_type (tree, tree);
static tree compare_arrays (tree, tree, tree);
static tree nonbinary_modular_operation (enum tree_code, tree, tree, tree);
static tree build_simple_component_ref (tree, tree, tree, bool);
\f
/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
\f
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest set bit in the
           constant, but it is originally in bytes, not bits.  */
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;
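      /* As a worked example of the C & -C idiom above (illustrative values,
         assuming the usual 8-bit storage unit): a byte offset of 24, binary
         11000, gives C & -C = 8, its lowest set bit, so the address is known
         to be 8-byte aligned and MIN (8 * 8, BIGGEST_ALIGNMENT) = 64 bits is
         returned on typical targets.  */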

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through...  */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
\f
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* If this is for an assignment, we must prevent writing more than what the
     target may hold, and the case of tagged types is handled in
     build_binary_op, so use the lhs type if it is known to be smaller, or if
     it is of constant size and the rhs type is not, whatever the modes.  We
     also force t1 in case of constant size equality to minimize occurrences
     of view conversions on the lhs of assignments.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || !tree_int_cst_lt (TYPE_SIZE (t2), TYPE_SIZE (t1))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
\f
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */
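
/* For instance, in Ada terms, the following hypothetical operands compare
   equal by way (1), both being empty even though their bounds differ:

     S1 : String (1 .. 0);
     S2 : String (5 .. 4);
     pragma Assert (S1 = S2);  --  True: both have zero length.  */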

static tree
compare_arrays (tree result_type, tree a1, tree a2)
{
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  tree result = convert (result_type, integer_one_node);
  tree a1_is_null = convert (result_type, integer_zero_node);
  tree a2_is_null = convert (result_type, integer_zero_node);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     to suppress the comparison of the data.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree bt = get_base_type (TREE_TYPE (lb1));
      tree length1 = fold_build2 (MINUS_EXPR, bt, ub1, lb1);
      tree length2 = fold_build2 (MINUS_EXPR, bt, ub2, lb2);
      tree comparison, this_a1_is_null, this_a2_is_null;
      tree nbt, tem;
      bool btem;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.
         Note that we have set the `length' values to the length - 1.  */
      if (TREE_CODE (length1) == INTEGER_CST
          && !integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
                                          convert (bt, integer_one_node))))
        {
          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of this dimension in the second array is the constant
         zero, we can just go inside the original bounds for the first
         array and see if last < first.  */
      if (integer_zerop (fold_build2 (PLUS_EXPR, bt, length2,
                                      convert (bt, integer_one_node))))
        {
          tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = build_binary_op (LT_EXPR, result_type, ub, lb);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);

          length_zero_p = true;
          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, integer_one_node);
        }

      /* If the length is some other constant value, we know that this
         dimension in the first array cannot be superflat, so we can just
         use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          nbt = get_base_type (TREE_TYPE (ub1));

          comparison
            = build_binary_op (EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, nbt, ub2, lb2));

          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);

          this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
          this_a2_is_null = convert (result_type, integer_zero_node);
        }

      /* Otherwise compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = build_binary_op (EQ_EXPR, result_type, length1, length2);

          this_a1_is_null
            = build_binary_op (LT_EXPR, result_type, length1,
                               convert (bt, integer_zero_node));
          this_a2_is_null
            = build_binary_op (LT_EXPR, result_type, length2,
                               convert (bt, integer_zero_node));
        }

      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);
      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));

      if (type)
        {
          a1 = convert (type, a1),
          a2 = convert (type, a2);
        }

      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
                                fold_build2 (EQ_EXPR, result_type, a1, a2));
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand has side-effects, they have to be evaluated before
     starting the comparison above since the place they would be otherwise
     evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
\f
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */
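
/* As a worked illustration (hypothetical values): for a TYPE with modulus
   10, the modulus needs floor_log2 (10) + 1 = 4 bits.  An addition can reach
   9 + 9 = 18 and so needs one more bit before reduction, while a
   multiplication can reach 9 * 9 = 81 and needs twice as many, which is why
   the code below grows the needed precision by 1 for PLUS/MINUS and doubles
   it for MULT before doing the operation and reducing the result.  */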

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For addition
     and subtraction, we need one more, and for multiplication we need twice
     as many.  But we never want a precision smaller than that of the type
     itself.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, integer_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, integer_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
\f
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
   in which case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_IS_PADDING_P
                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                       (left_type,
                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else if (!operation_type)
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert (sizetype, right_operand);

      if (!TREE_CONSTANT (right_operand)
          || !TREE_CONSTANT (TYPE_MIN_VALUE (right_type)))
        gnat_mark_addressable (left_operand);

      modulus = NULL_TREE;
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
      gcc_assert (!POINTER_TYPE_P (left_type));

      /* ... fall through ... */

    case EQ_EXPR:
    case NE_EXPR:
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (result_type, left_operand, right_operand);

          if (op_code == NE_EXPR)
            result = invert_truthvalue (result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If we are comparing a fat pointer against zero, we just need to
         compare the data pointer.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type)
          && TREE_CODE (right_operand) == CONSTRUCTOR
          && integer_zerop (VEC_index (constructor_elt,
                                       CONSTRUCTOR_ELTS (right_operand),
                                       0)->value))
        {
          left_operand
            = build_component_ref (left_operand, NULL_TREE,
                                   TYPE_FIELDS (left_base_type), false);
          right_operand
            = convert (TREE_TYPE (left_operand), integer_zero_node);
        }

      modulus = NULL_TREE;
      break;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* These operations are not used anymore.  */
      gcc_unreachable ();

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
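      /* For example, with a hypothetical modulus of 16, both operands lie
         in 0 .. 15, and AND, OR and XOR of two such values cannot set bit 4
         or above, so the result is already reduced and no final masking by
         the modulus is needed.  */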
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  TREE_SIDE_EFFECTS (result) |= has_side_effects;
  TREE_CONSTANT (result)
    |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
        && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);

  if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
      && TYPE_VOLATILE (operation_type))
    TREE_THIS_VOLATILE (result) = 1;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;
  bool side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && !AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
      gcc_assert (result_type == base_type);
      result = invert_truthvalue (operand);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and
             add the offset to the field.  Otherwise, do this the normal
             way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              if (bitpos % BITS_PER_UNIT != 0)
                post_error
                  ("taking address of object not aligned on storage unit?",
                   error_gnat_node);

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  The result will later be converted to the desired
                 result type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = VEC_index (constructor_elt,
                                  CONSTRUCTOR_ELTS (operand),
                                  0)->value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fallthru ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the template.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          if (type != error_mark_node)
            operation_type = build_pointer_type (type);

          gnat_mark_addressable (operand);
          result = fold_build1 (ADDR_EXPR, operation_type, operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      /* If we want to refer to an entire unconstrained array,
         make up an expression to do so.  This will never survive to
         the back-end.  If TYPE is a thin pointer, first convert the
         operand to a fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        {
          operand
            = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
                       operand);
          type = TREE_TYPE (operand);
        }

      if (TYPE_IS_FAT_POINTER_P (type))
        {
          result = build1 (UNCONSTRAINED_ARRAY_REF,
                           TYPE_UNCONSTRAINED_ARRAY (type), operand);
          TREE_READONLY (result)
            = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
        }
      else if (TREE_CODE (operand) == ADDR_EXPR)
        result = TREE_OPERAND (operand, 0);

      else
        {
          result = fold_build1 (op_code, TREE_TYPE (type), operand);
          TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
        }

      side_effects
        = (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */
        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus is
               a power of two minus one, we can do the subtraction as an XOR
               since it is equivalent and faster on most machines.  */
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   integer_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result, operand);
              }
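            /* A quick sanity check of the XOR equivalence above, with a
               hypothetical modulus of 7 (binary 111, a power of two minus
               one): for an operand of 2, MODULUS - OPERAND = 5 and
               2 XOR 7 = 5 as well; a zero operand is handled by the
               COND_EXPR just built, which returns it unchanged.  */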
            else
              {
                /* For the NOT cases, we need a constant equal to
                   the modulus minus one.  For a binary modulus, we
                   XOR against the constant and subtract the operand from
                   that constant for nonbinary modulus.  */
                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (side_effects)
    {
      TREE_SIDE_EFFECTS (result) = 1;
      if (TREE_CODE (result) == INDIRECT_REF)
        TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);
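
  /* For instance (a hypothetical case, not from this unit), if both arms
     are PLUS_EXPRs offsetting the same SAVE_EXPR <V>, the calls above see
     through the arithmetic and the test below hoists the SAVE_EXPR, so V
     is computed before the branch rather than in only one arm.  */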

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}

/* Similar, but for RETURN_EXPR.  If RET_VAL is non-null, build a RETURN_EXPR
   around the assignment of RET_VAL to RET_OBJ.  Otherwise just build a bare
   RETURN_EXPR around RET_OBJ, which may be null in this case.  */

tree
build_return_expr (tree ret_obj, tree ret_val)
{
  tree result_expr;

  if (ret_val)
    {
      /* The gimplifier explicitly enforces the following invariant:

              RETURN_EXPR
                  |
              MODIFY_EXPR
              /        \
             /          \
         RET_OBJ        ...

         As a consequence, type consistency dictates that we use the type
         of the RET_OBJ as the operation type.  */
      tree operation_type = TREE_TYPE (ret_obj);

      /* Convert the right operand to the operation type.  Note that it's the
         same transformation as in the MODIFY_EXPR case of build_binary_op,
         with the assumption that the type cannot involve a placeholder.  */
      if (operation_type != TREE_TYPE (ret_val))
        ret_val = convert (operation_type, ret_val);

      result_expr = build2 (MODIFY_EXPR, operation_type, ret_obj, ret_val);
    }
  else
    result_expr = ret_obj;

  return build1 (RETURN_EXPR, void_type_node, result_expr);
}
\f
/* Build a CALL_EXPR to call FUNDECL with one argument, ARG.  Return
   the CALL_EXPR.  */

tree
build_call_1_expr (tree fundecl, tree arg)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               1, arg);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2.  Return
   the CALL_EXPR.  */

tree
build_call_2_expr (tree fundecl, tree arg1, tree arg2)
{
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               2, arg1, arg2);
  TREE_SIDE_EFFECTS (call) = 1;
  return call;
}

/* Likewise to call FUNDECL with no arguments.  */

tree
build_call_0_expr (tree fundecl)
{
  /* We rely on build_call_nary to compute TREE_SIDE_EFFECTS.  This makes
     it possible to propagate DECL_IS_PURE on parameterless functions.  */
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (fundecl)),
                               build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
                               0);
  return call;
}
\f
/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_1_expr (gnu_local_raise,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename)
    = build_array_type (char_type_node, build_index_type (size_int (len)));

  return
    build_call_2_expr (fndecl,
                       build1 (ADDR_EXPR, build_pointer_type (char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}
\f
/* qsort comparer for the bit positions of two constructor elements
   for record components.  */

static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const_tree const elmt1 = * (const_tree const *) rt1;
  const_tree const elmt2 = * (const_tree const *) rt2;
  const_tree const field1 = TREE_PURPOSE (elmt1);
  const_tree const field2 = TREE_PURPOSE (elmt2);
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));

1504}
1505
1506/* Return a CONSTRUCTOR of TYPE whose list is LIST. */
1507
1508tree
1509gnat_build_constructor (tree type, tree list)
1510{
a1ab4c31
AC
1511 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1512 bool side_effects = false;
324c9b02
OH
1513 tree elmt, result;
1514 int n_elmts;
a1ab4c31
AC
1515
1516 /* Scan the elements to see if they are all constant or if any has side
1517 effects, to let us set global flags on the resulting constructor. Count
1518 the elements along the way for possible sorting purposes below. */
1519 for (n_elmts = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), n_elmts ++)
1520 {
324c9b02
OH
1521 tree obj = TREE_PURPOSE (elmt);
1522 tree val = TREE_VALUE (elmt);
1523
1524 /* The predicate must be in keeping with output_constructor. */
1525 if (!TREE_CONSTANT (val)
a1ab4c31 1526 || (TREE_CODE (type) == RECORD_TYPE
324c9b02
OH
1527 && CONSTRUCTOR_BITFIELD_P (obj)
1528 && !initializer_constant_valid_for_bitfield_p (val))
1529 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
a1ab4c31
AC
1530 allconstant = false;
1531
324c9b02 1532 if (TREE_SIDE_EFFECTS (val))
a1ab4c31 1533 side_effects = true;
a1ab4c31
AC
1534 }
1535
1536 /* For record types with constant components only, sort field list
1537 by increasing bit position. This is necessary to ensure the
1538 constructor can be output as static data. */
1539 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1540 {
1541 /* Fill an array with an element tree per index, and ask qsort to order
1542 them according to what a bitpos comparison function says. */
1543 tree *gnu_arr = (tree *) alloca (sizeof (tree) * n_elmts);
1544 int i;
1545
1546 for (i = 0, elmt = list; elmt; elmt = TREE_CHAIN (elmt), i++)
1547 gnu_arr[i] = elmt;
1548
1549 qsort (gnu_arr, n_elmts, sizeof (tree), compare_elmt_bitpos);
1550
1551 /* Then reconstruct the list from the sorted array contents. */
1552 list = NULL_TREE;
1553 for (i = n_elmts - 1; i >= 0; i--)
1554 {
1555 TREE_CHAIN (gnu_arr[i]) = list;
1556 list = gnu_arr[i];
1557 }
1558 }
1559
1560 result = build_constructor_from_list (type, list);
1561 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1562 TREE_SIDE_EFFECTS (result) = side_effects;
1563 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1564 return result;
1565}
\f
/* Return a COMPONENT_REF to access a field that is given either by
   COMPONENT, an IDENTIFIER_NODE giving the name of the field, or by
   FIELD, a FIELD_DECL for the field.  Don't fold the result if
   NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

static tree
build_simple_component_ref (tree record_variable, tree component,
			    tree field, bool no_fold_p)
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree ref, inner_variable;

  gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
	       || TREE_CODE (record_type) == UNION_TYPE
	       || TREE_CODE (record_type) == QUAL_UNION_TYPE)
	      && TYPE_SIZE (record_type)
	      && (component != 0) != (field != 0));

  /* If no field was specified, look for a field with the specified name
     in the current record only.  */
  if (!field)
    for (field = TYPE_FIELDS (record_type); field;
	 field = TREE_CHAIN (field))
      if (DECL_NAME (field) == component)
	break;

  if (!field)
    return NULL_TREE;

  /* If this field is not in the specified record, see if we can find
     something in the record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
    {
      tree new_field;

      /* First loop through normal components.  */
      for (new_field = TYPE_FIELDS (record_type); new_field;
	   new_field = TREE_CHAIN (new_field))
	if (SAME_FIELD_P (field, new_field))
	  break;

      /* Next, loop through DECL_INTERNAL_P components if we haven't found
	 the component in the first search.  Doing this search in two steps
	 is required to avoid hidden homonymous fields in the
	 _Parent field.  */
      if (!new_field)
	for (new_field = TYPE_FIELDS (record_type); new_field;
	     new_field = TREE_CHAIN (new_field))
	  if (DECL_INTERNAL_P (new_field))
	    {
	      tree field_ref
		= build_simple_component_ref (record_variable,
					      NULL_TREE, new_field, no_fold_p);
	      ref = build_simple_component_ref (field_ref, NULL_TREE, field,
						no_fold_p);

	      if (ref)
		return ref;
	    }

      field = new_field;
    }

  if (!field)
    return NULL_TREE;

  /* If the field's offset has overflowed, do not attempt to access it,
     as doing so may trigger sanity checks deeper in the back-end.
     Note that we don't need to warn since this will be done on trying
     to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
    return NULL_TREE;

  /* Look through conversions between type variants.  Note that this
     is transparent as far as the field is concerned.  */
  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
	 == record_type)
    inner_variable = TREE_OPERAND (record_variable, 0);
  else
    inner_variable = record_variable;

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
		NULL_TREE);

  if (TREE_READONLY (record_variable) || TREE_READONLY (field))
    TREE_READONLY (ref) = 1;
  if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  if (no_fold_p)
    return ref;

  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  else if (TREE_CODE (record_variable) == CONSTRUCTOR
	   && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
    {
      VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
      unsigned HOST_WIDE_INT idx;
      tree index, value;
      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
	if (index == field)
	  return value;
      return ref;
    }

  else
    return fold (ref);
}
\f
/* Like build_simple_component_ref, except that we give an error if the
   reference could not be found.  */

tree
build_component_ref (tree record_variable, tree component,
		     tree field, bool no_fold_p)
{
  tree ref = build_simple_component_ref (record_variable, component, field,
					 no_fold_p);

  if (ref)
    return ref;

  /* If FIELD was specified, assume this is an invalid user field so raise
     Constraint_Error.  Otherwise, we have no type to return so abort.  */
  gcc_assert (field);
  return build1 (NULL_EXPR, TREE_TYPE (field),
		 build_call_raise (CE_Discriminant_Check_Failed, Empty,
				   N_Raise_Constraint_Error));
}
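
/* Illustrative sketch (not part of the original source): accessing a
   record component either by name or by FIELD_DECL.  Exactly one of
   the two middle arguments must be set, per the assertion in
   build_simple_component_ref.  The component name "F" is hypothetical.  */
#if 0
static tree
example_component_access (tree gnu_record)
{
  /* By name: look up a component called "F" in the record.  */
  tree by_name
    = build_component_ref (gnu_record, get_identifier ("F"),
			   NULL_TREE, false);

  /* By decl: take the first field of the record type directly.  */
  tree by_decl
    = build_component_ref (gnu_record, NULL_TREE,
			   TYPE_FIELDS (TREE_TYPE (gnu_record)), false);

  return by_name ? by_name : by_decl;
}
#endif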
\f
/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
   identically.  Process the case where a GNAT_PROC to call is provided.  */

static inline tree
build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
			       Entity_Id gnat_proc, Entity_Id gnat_pool)
{
  tree gnu_proc = gnat_to_gnu (gnat_proc);
  tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
  tree gnu_call;

  /* The storage pools are obviously always tagged types, but the
     secondary stack uses the same mechanism and is not tagged.  */
  if (Is_Tagged_Type (Etype (gnat_pool)))
    {
      /* The size is the third parameter; the alignment is passed
	 with the same type.  */
      Entity_Id gnat_size_type
	= Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      tree gnu_pool = gnat_to_gnu (gnat_pool);
      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);

      gnu_size = convert (gnu_size_type, gnu_size);
      gnu_align = convert (gnu_size_type, gnu_align);

      /* The first arg is always the address of the storage pool; next
	 comes the address of the object, for a deallocator, then the
	 size and alignment.  */
      if (gnu_obj)
	gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
				    gnu_proc_addr, 4, gnu_pool_addr,
				    gnu_obj, gnu_size, gnu_align);
      else
	gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
				    gnu_proc_addr, 3, gnu_pool_addr,
				    gnu_size, gnu_align);
    }

  /* Secondary stack case.  */
  else
    {
      /* The size is the second parameter.  */
      Entity_Id gnat_size_type
	= Etype (Next_Formal (First_Formal (gnat_proc)));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      gnu_size = convert (gnu_size_type, gnu_size);

      /* The first arg is the address of the object, for a deallocator,
	 then the size.  */
      if (gnu_obj)
	gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
				    gnu_proc_addr, 2, gnu_obj, gnu_size);
      else
	gnu_call = build_call_nary (TREE_TYPE (TREE_TYPE (gnu_proc)),
				    gnu_proc_addr, 1, gnu_size);
    }

  TREE_SIDE_EFFECTS (gnu_call) = 1;
  return gnu_call;
}

/* Helper for build_call_alloc_dealloc, to build and return an allocator for
   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
   latter offers.  */

static inline tree
maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
{
  /* When the DATA_TYPE alignment is stricter than what malloc offers
     (super-aligned case), we allocate an "aligning" wrapper type and return
     the address of its single data field with the malloc's return value
     stored just in front.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int default_allocator_alignment
    = get_target_default_allocator_alignment () * BITS_PER_UNIT;

  tree aligning_type
    = ((data_align > default_allocator_alignment)
       ? make_aligning_type (data_type, data_align, data_size,
			     default_allocator_alignment,
			     POINTER_SIZE / BITS_PER_UNIT)
       : NULL_TREE);

  tree size_to_malloc
    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;

  tree malloc_ptr;

  /* On VMS, if 64-bit memory is disabled or pointers are 64-bit and the
     allocator size is 32-bit or Convention C, allocate 32-bit memory.  */
  if (TARGET_ABI_OPEN_VMS
      && (!TARGET_MALLOC64
	  || (POINTER_SIZE == 64
	      && (UI_To_Int (Esize (Etype (gnat_node))) == 32
		  || Convention (Etype (gnat_node)) == Convention_C))))
    malloc_ptr = build_call_1_expr (malloc32_decl, size_to_malloc);
  else
    malloc_ptr = build_call_1_expr (malloc_decl, size_to_malloc);

  if (aligning_type)
    {
      /* Latch malloc's return value and get a pointer to the aligning field
	 first.  */
      tree storage_ptr = gnat_protect_expr (malloc_ptr);

      tree aligning_record_addr
	= convert (build_pointer_type (aligning_type), storage_ptr);

      tree aligning_record
	= build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);

      tree aligning_field
	= build_component_ref (aligning_record, NULL_TREE,
			       TYPE_FIELDS (aligning_type), false);

      tree aligning_field_addr
	= build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);

      /* Then arrange to store the allocator's return value ahead
	 and return.  */
      tree storage_ptr_slot_addr
	= build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
			   convert (ptr_void_type_node, aligning_field_addr),
			   size_int (-(HOST_WIDE_INT) POINTER_SIZE
				     / BITS_PER_UNIT));

      tree storage_ptr_slot
	= build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (build_pointer_type (ptr_void_type_node),
				   storage_ptr_slot_addr));

      return
	build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
		build_binary_op (MODIFY_EXPR, NULL_TREE,
				 storage_ptr_slot, storage_ptr),
		aligning_field_addr);
    }
  else
    return malloc_ptr;
}
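
/* Illustrative sketch (not part of the original source): the pointer
   layout maybe_wrap_malloc generates, rewritten as plain runtime C.
   We over-allocate, round up to the strict alignment (assumed to be a
   power of two), and stash the raw malloc pointer one word before the
   aligned block, which is exactly what maybe_wrap_free digs out again.  */
#if 0
#include <stdlib.h>
#include <stdint.h>

static void *
superaligned_alloc (size_t size, size_t align)
{
  /* Room for the data, the worst-case alignment slack, and the stash.  */
  void *raw = malloc (size + align + sizeof (void *));
  if (!raw)
    return NULL;
  uintptr_t aligned
    = ((uintptr_t) raw + sizeof (void *) + align - 1)
      & ~(uintptr_t) (align - 1);
  ((void **) aligned)[-1] = raw;	/* Stash malloc's return value.  */
  return (void *) aligned;
}

static void
superaligned_free (void *ptr)
{
  if (ptr)
    free (((void **) ptr)[-1]);		/* Retrieve the stashed pointer.  */
}
#endif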

/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */

static inline tree
maybe_wrap_free (tree data_ptr, tree data_type)
{
  /* In the regular alignment case, we pass the data pointer straight to free.
     In the superaligned case, we need to retrieve the initial allocator
     return value, stored in front of the data block at allocation time.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int default_allocator_alignment
    = get_target_default_allocator_alignment () * BITS_PER_UNIT;

  tree free_ptr;

  if (data_align > default_allocator_alignment)
    {
      /* DATA_FRONT_PTR (void *) = (void *) DATA_PTR - sizeof (void *)  */
      tree data_front_ptr
	= build_binary_op
	  (POINTER_PLUS_EXPR, ptr_void_type_node,
	   convert (ptr_void_type_node, data_ptr),
	   size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));

      /* FREE_PTR (void *) = *(void **) DATA_FRONT_PTR  */
      free_ptr
	= build_unary_op
	  (INDIRECT_REF, NULL_TREE,
	   convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
    }
  else
    free_ptr = data_ptr;

  return build_call_1_expr (free_decl, free_ptr);
}

/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
			  Entity_Id gnat_proc, Entity_Id gnat_pool,
			  Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Explicit procedure to call?  This one is assumed to deal with the type
     alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
					  gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we can no longer be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
	Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}
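
/* Illustrative sketch (not part of the original source): the two
   directions of build_call_alloc_dealloc.  The tree and node operands
   are hypothetical placeholders; Empty stands for "no user-defined
   procedure/pool", which selects the __gnat_malloc/__gnat_free path.  */
#if 0
static void
example_alloc_dealloc (tree gnu_type, tree gnu_obj, Node_Id gnat_node)
{
  /* Allocation: no object given, so a call yielding an address is built.  */
  tree alloc
    = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE_UNIT (gnu_type),
				gnu_type, Empty, Empty, gnat_node);

  /* Deallocation: an object is given, so a call releasing it is built.  */
  tree dealloc
    = build_call_alloc_dealloc (gnu_obj, TYPE_SIZE_UNIT (gnu_type),
				gnu_type, Empty, Empty, gnat_node);
  (void) alloc;
  (void) dealloc;
}
#endif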
\f
/* Build a GCC tree to correspond to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some type of pointer.  Return the tree.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
		 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size = TYPE_SIZE_UNIT (type);
  tree result;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
	= build_unc_object_type_from_ptr (result_type, type,
					  get_identifier ("ALLOC"));
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);
      tree storage;
      tree template_cons = NULL_TREE;

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
					     init);

      /* If the size overflows, pass -1 so the allocator will raise
	 Storage_Error.  */
      if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
	size = ssize_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
					  gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, gnat_protect_expr (storage));

      if (TYPE_IS_PADDING_P (type))
	{
	  type = TREE_TYPE (TYPE_FIELDS (type));
	  if (init)
	    init = convert (type, init);
	}

      /* If there is an initializing expression, make a constructor for
	 the entire object including the bounds and copy it into the
	 object.  If there is no initializing expression, just set the
	 bounds.  */
      if (init)
	{
	  template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
				     init, NULL_TREE);
	  template_cons = tree_cons (TYPE_FIELDS (storage_type),
				     build_template (template_type, type,
						     init),
				     template_cons);

	  return convert
	    (result_type,
	     build2 (COMPOUND_EXPR, storage_ptr_type,
		     build_binary_op
		     (MODIFY_EXPR, storage_type,
		      build_unary_op (INDIRECT_REF, NULL_TREE,
				      convert (storage_ptr_type, storage)),
		      gnat_build_constructor (storage_type, template_cons)),
		     convert (storage_ptr_type, storage)));
	}
      else
	return build2
	  (COMPOUND_EXPR, result_type,
	   build_binary_op
	   (MODIFY_EXPR, template_type,
	    build_component_ref
	    (build_unary_op (INDIRECT_REF, NULL_TREE,
			     convert (storage_ptr_type, storage)),
	     NULL_TREE, TYPE_FIELDS (storage_type), false),
	    build_template (template_type, type, NULL_TREE)),
	   convert (result_type, convert (storage_ptr_type, storage)));
    }

  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
	  || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
	size = substitute_placeholder_in_expr (size, init);
      else
	size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so the allocator will raise
     Storage_Error.  */
  if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
    size = ssize_int (-1);

  result = convert (result_type,
		    build_call_alloc_dealloc (NULL_TREE, size, type,
					      gnat_proc, gnat_pool,
					      gnat_node));

  /* If we have an initial value, protect the new address, assign the value
     and return the address with a COMPOUND_EXPR.  */
  if (init)
    {
      result = gnat_protect_expr (result);
      result
	= build2 (COMPOUND_EXPR, TREE_TYPE (result),
		  build_binary_op
		  (MODIFY_EXPR, NULL_TREE,
		   build_unary_op (INDIRECT_REF,
				   TREE_TYPE (TREE_TYPE (result)), result),
		   init),
		  result);
    }

  return convert (result_type, result);
}
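
/* Illustrative sketch (not part of the original source): how the
   translation of an Ada allocator such as "new Integer'(0)" might
   reach build_allocator.  The type operands are hypothetical
   placeholders for trees produced by gnat_to_gnu_type.  */
#if 0
static tree
example_new_integer (tree gnu_int_type, tree gnu_ptr_type, Node_Id gnat_node)
{
  tree init = convert (gnu_int_type, integer_zero_node);
  return build_allocator (gnu_int_type, init, gnu_ptr_type,
			  Empty, Empty, gnat_node, false);
}
#endif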
\f
/* Fill in a VMS descriptor for EXPR and return a constructor for it.
   GNAT_FORMAL is how we find the descriptor record.  GNAT_ACTUAL is
   how we derive the source location to raise Constraint_Error on an
   out-of-range pointer.  */

tree
fill_vms_descriptor (tree expr, Entity_Id gnat_formal, Node_Id gnat_actual)
{
  tree field;
  tree parm_decl = get_gnu_tree (gnat_formal);
  tree const_list = NULL_TREE;
  tree record_type = TREE_TYPE (TREE_TYPE (parm_decl));
  int do_range_check
    = strcmp ("MBO",
	      IDENTIFIER_POINTER (DECL_NAME (TYPE_FIELDS (record_type))));

  expr = maybe_unconstrained_array (expr);
  gnat_mark_addressable (expr);

  for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
    {
      tree conexpr = convert (TREE_TYPE (field),
			      SUBSTITUTE_PLACEHOLDER_IN_EXPR
			      (DECL_INITIAL (field), expr));

      /* Check to ensure that only 32-bit pointers are passed in
	 32-bit descriptors.  */
      if (do_range_check
	  && strcmp (IDENTIFIER_POINTER (DECL_NAME (field)), "POINTER") == 0)
	{
	  tree pointer64type
	    = build_pointer_type_for_mode (void_type_node, DImode, false);
	  tree addr64expr = build_unary_op (ADDR_EXPR, pointer64type, expr);
	  tree malloc64low
	    = build_int_cstu (long_integer_type_node, 0x80000000);

	  add_stmt (build3 (COND_EXPR, void_type_node,
			    build_binary_op (GE_EXPR, long_integer_type_node,
					     convert (long_integer_type_node,
						      addr64expr),
					     malloc64low),
			    build_call_raise (CE_Range_Check_Failed,
					      gnat_actual,
					      N_Raise_Constraint_Error),
			    NULL_TREE));
	}
      const_list = tree_cons (field, conexpr, const_list);
    }

  return gnat_build_constructor (record_type, nreverse (const_list));
}

/* Indicate that we need to take the address of T and that it therefore
   should not be allocated in a register.  Returns true if successful.  */

bool
gnat_mark_addressable (tree t)
{
  while (true)
    switch (TREE_CODE (t))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      CASE_CONVERT:
	t = TREE_OPERAND (t, 0);
	break;

      case COMPOUND_EXPR:
	t = TREE_OPERAND (t, 1);
	break;

      case CONSTRUCTOR:
	TREE_ADDRESSABLE (t) = 1;
	return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	TREE_ADDRESSABLE (t) = 1;
	return true;

      case FUNCTION_DECL:
	TREE_ADDRESSABLE (t) = 1;
	return true;

      case CONST_DECL:
	return DECL_CONST_CORRESPONDING_VAR (t)
	       && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));

      default:
	return true;
      }
}
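
/* Illustrative sketch (not part of the original source): callers mark
   an lvalue addressable before taking its address, as fill_vms_descriptor
   does above, so the middle-end will not try to keep it in a register.  */
#if 0
static tree
example_take_address (tree gnu_expr)
{
  gnat_mark_addressable (gnu_expr);
  return build_unary_op (ADDR_EXPR, NULL_TREE, gnu_expr);
}
#endif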
\f
/* Save EXP for later use or reuse.  This is equivalent to save_expr in tree.c
   but we know how to handle our own nodes.  */

tree
gnat_save_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  if (code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
		   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  return save_expr (exp);
}

/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
   is optimized under the assumption that EXP's value doesn't change before
   its subsequent reuse(s) except through its potential reevaluation.  */

tree
gnat_protect_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  /* If EXP has no side effects, we theoretically don't need to do anything.
     However, we may be recursively passed more and more complex expressions
     involving checks which will be reused multiple times and eventually be
     unshared for gimplification; in order to avoid a complexity explosion
     at that point, we protect any expressions more complex than a simple
     arithmetic expression.  */
  if (!TREE_SIDE_EFFECTS (exp)
      && !EXPRESSION_CLASS_P (skip_simple_arithmetic (exp)))
    return exp;

  /* If this is a conversion, protect what's inside the conversion.  */
  if (code == NON_LVALUE_EXPR
      || CONVERT_EXPR_CODE_P (code)
      || code == VIEW_CONVERT_EXPR)
    return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));

  /* If we're indirectly referencing something, we only need to protect the
     address since the data itself can't change in these situations.  */
  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
		   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  /* If this is a fat pointer or something that can be placed in a register,
     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
     returned via invisible reference in most ABIs so the temporary will
     directly be filled by the callee.  */
  if (TYPE_IS_FAT_POINTER_P (type)
      || TYPE_MODE (type) != BLKmode
      || code == CALL_EXPR)
    return save_expr (exp);

  /* Otherwise, protect the address and dereference.  */
  return
    build_unary_op (INDIRECT_REF, type,
		    save_expr (build_unary_op (ADDR_EXPR,
					       build_reference_type (type),
					       exp)));
}
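
/* Illustrative sketch (not part of the original source): the intended
   division of labor between the two routines above.  gnat_save_expr
   guards against any later change of the value; gnat_protect_expr is
   the cheaper variant when the value can only change through
   reevaluation, e.g. when an operand is reused within one expression.  */
#if 0
static tree
example_double_use (tree gnu_expr)
{
  /* Arrange for GNU_EXPR to be evaluated once, then use it twice.  */
  tree saved = gnat_protect_expr (gnu_expr);
  return build_binary_op (PLUS_EXPR, TREE_TYPE (saved), saved, saved);
}
#endif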

/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
   argument to force evaluation of everything.  */

static tree
gnat_stabilize_reference_1 (tree e, bool force)
{
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  /* We cannot ignore const expressions because we might be dealing with a
     reference to a const array whose index contains side effects.  But we
     can ignore things that are actual constants or that have already been
     handled by this function.  */
  if (TREE_CONSTANT (e) || code == SAVE_EXPR)
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
	 fat pointer.  This may be more efficient, but will also allow
	 us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
	  && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
	result
	  = build3 (code, type,
		    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
		    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The tcc_reference and tcc_comparison classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
	return save_expr (e);
      else
	return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
	= build2 (code, type,
		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result
	= build1 (code, type,
		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
      break;

    default:
      gcc_unreachable ();
    }

  /* See similar handling in gnat_stabilize_reference.  */
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}

/* This is equivalent to stabilize_reference in tree.c but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything.  We set SUCCESS to true unless we walk
   through something we don't know how to stabilize.  */

tree
gnat_stabilize_reference (tree ref, bool force, bool *success)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  /* Assume we'll succeed unless proven otherwise.  */
  if (success)
    *success = true;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    case ADDR_EXPR:
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case VIEW_CONVERT_EXPR:
      result
	= build1 (code, type,
		  gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
					    success));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type,
		       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
						   force));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
		       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
						 success),
		       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
		       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
						 success),
		       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
						   force),
		       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
						   force));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result = build4 (code, type,
		       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
						 success),
		       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
						   force),
		       NULL_TREE, NULL_TREE);
      break;

    case CALL_EXPR:
      result = gnat_stabilize_reference_1 (ref, force);
      break;

    case COMPOUND_EXPR:
      result = build2 (COMPOUND_EXPR, type,
		       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
						 success),
		       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
						   force));
      break;

    case CONSTRUCTOR:
      /* Constructors with 1 element are used extensively to formally
	 convert objects to special wrapping types.  */
      if (TREE_CODE (type) == RECORD_TYPE
	  && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
	{
	  tree index
	    = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
	  tree value
	    = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
	  result
	    = build_constructor_single (type, index,
					gnat_stabilize_reference_1 (value,
								    force));
	}
      else
	{
	  if (success)
	    *success = false;
	  return ref;
	}
      break;

    case ERROR_MARK:
      ref = error_mark_node;

      /* ... fall through to failure ...  */

      /* If REF isn't a kind of lvalue we recognize, make no change.
	 Caller should recognize the error for an invalid lvalue.  */
    default:
      if (success)
	*success = false;
      return ref;
    }

  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
     may not be sustained across some paths, such as the way via build1 for
     INDIRECT_REF.  We reset those flags here in the general case, which is
     consistent with the GCC version of this routine.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side-effects where there was none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
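
/* Illustrative sketch (not part of the original source): stabilizing a
   reference before reusing it, checking the SUCCESS flag as the code
   above allows (callers may also pass NULL when they do not care).  */
#if 0
static tree
example_stabilize (tree gnu_ref)
{
  bool success;
  tree stable = gnat_stabilize_reference (gnu_ref, false, &success);

  /* If the reference could not be stabilized, fall back to the
     original expression.  */
  return success ? stable : gnu_ref;
}
#endif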