/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2011, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have  received  a copy of the GNU General *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "ggc.h"
#include "output.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
\f
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through...  */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
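
/* Editorial illustration (not part of the original source): a standalone
   model of the INTEGER_CST case above, assuming 8-bit units and passing
   the alignment cap explicitly.  C & -C isolates the lowest set bit of C,
   i.e. the largest power of two dividing C; e.g. C = 24 yields 8 bytes,
   hence 64-bit alignment.  The function name is hypothetical.  */

#if 0  /* Sketch only.  */
static unsigned int
const_address_alignment (unsigned long long c, unsigned int cap_in_bits)
{
  /* C & -C is the lowest set bit, a byte count; scale to bits and cap it.
     C == 0 yields 0, i.e. "nothing known about the alignment".  */
  unsigned long long bits = 8 * (c & -c);
  return bits > cap_in_bits ? cap_in_bits : (unsigned int) bits;
}
#endif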
\f
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left-hand side operand, and T2 for the right-hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead to here with types of different
     sizes even when both are constant (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must avoid writing more than what the target can hold if this is for
     an assignment (the case of tagged types is handled in build_binary_op),
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case
     of constant size equality to minimize occurrences of view conversions
     on the lhs of an assignment, except for the case of record types with a
     variant part on the lhs but not on the rhs, to make the conversion
     simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !(TREE_CODE (t1) == RECORD_TYPE
                   && TREE_CODE (t2) == RECORD_TYPE
                   && get_variant_part (t1) != NULL_TREE
                   && get_variant_part (t2) == NULL_TREE))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it, whatever the modes.  At
     this point it is known to be smaller, or of constant size when the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
\f
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, it has to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree bt;

          ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
          bt = get_base_type (TREE_TYPE (ub1));

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, bt, ub1, lb1),
                               build_binary_op (MINUS_EXPR, bt, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
          else
            this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length1, size_zero_node);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
          else
            this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length2, size_zero_node);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1);
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If either operand has side-effects, it has to be evaluated before
     starting the comparison above since the place where it would otherwise
     be evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
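
/* Editorial illustration (not part of the original source): the
   one-dimensional scalar analogue of the comparison built above, with
   hypothetical names; assumes memcmp from <string.h>.  Two arrays are
   equal if both are empty, where a "superflat" array (ub < lb - 1) also
   counts as empty, or if their lengths and data match.  */

#if 0  /* Sketch only.  */
static int
arrays_equal_1d (const int *a1, long lb1, long ub1,
                 const int *a2, long lb2, long ub2)
{
  long length1 = ub1 - lb1 + 1;
  long length2 = ub2 - lb2 + 1;

  /* (1) Both lengths are zero (or negative, for superflat arrays).  */
  if (length1 <= 0 && length2 <= 0)
    return 1;

  /* (2) The lengths are equal and the data is equal.  */
  if (length1 != length2)
    return 0;
  return memcmp (a1, a2, (size_t) length1 * sizeof (int)) == 0;
}
#endif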

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, it has to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types, so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 0)->value;
  else
    p1_array = build_component_ref (p1, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 0)->value;
  else
    p2_array = build_component_ref (p2, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}
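
/* Editorial illustration (not part of the original source): the predicate
   built above, modeled on plain C structs with hypothetical names.  A fat
   pointer pairs the pointer to the array data with a pointer to its
   bounds; two of them compare equal when the data pointers match and,
   unless both are null, the bounds pointers match too.  */

#if 0  /* Sketch only.  */
struct fat_pointer_sketch
{
  const void *array;   /* Pointer to the array data.  */
  const void *bounds;  /* Pointer to the bounds record.  */
};

static int
fat_pointers_equal (struct fat_pointer_sketch p1, struct fat_pointer_sketch p2)
{
  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return p1.array == p2.array
         && (p1.array == 0 || p1.bounds == p2.bounds);
}
#endif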
\f
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For addition
     and subtraction, we need one more, and for multiplication we need twice
     as many.  But we never want to make a size smaller than our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
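
/* Editorial illustration (not part of the original source): a scalar model
   of why the code above widens before multiplying.  For a nonbinary
   modulus M, operands lie in [0, M-1], so a product can need twice the
   precision of M before the final modulus operation: with M = 7 (3 bits),
   6 * 6 = 36 needs 6 bits, and 6 + 6 = 12 needs one extra bit.  The
   function name is hypothetical.  */

#if 0  /* Sketch only.  */
static unsigned int
nonbinary_modular_mult (unsigned int lhs, unsigned int rhs, unsigned int m)
{
  /* Widen to a double-precision type, multiply, then reduce; this mirrors
     the MULT_EXPR path above, which does a full TRUNC_MOD_EXPR.  */
  unsigned long long wide = (unsigned long long) lhs * rhs;
  return (unsigned int) (wide % m);
}
#endif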
\f
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (result_type == NULL_TREE);
#endif
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_IS_PADDING_P
                       (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
                    && gnat_types_compatible_p
                       (left_type,
                        TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        operation_type = left_type;

      /* If we have a call to a function that returns an unconstrained type
         with default discriminant on the RHS, use the RHS type (which is
         padded) as we cannot compute the size of the actual assignment.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && TYPE_IS_PADDING_P (right_type)
               && CONTAINS_PLACEHOLDER_P
                  (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversions between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert (sizetype, right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:  case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */

      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      TREE_THIS_NOTRAP (result) = 1;
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             is the corresponding address, e.g. for an allocator.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              enum machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = VEC_index (constructor_elt,
                                  CONSTRUCTOR_ELTS (operand),
                                  0)->value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fall through ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the template.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      /* If we want to refer to an unconstrained array, use the appropriate
         expression to do so.  This will never survive down to the back-end.
         But if TYPE is a thin pointer, first convert to a fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        {
          operand
            = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
                       operand);
          type = TREE_TYPE (operand);
        }

      if (TYPE_IS_FAT_POINTER_P (type))
        {
          result = build1 (UNCONSTRAINED_ARRAY_REF,
                           TYPE_UNCONSTRAINED_ARRAY (type), operand);
          TREE_READONLY (result)
            = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
        }

      /* If we are dereferencing an ADDR_EXPR, return its operand.  */
      else if (TREE_CODE (operand) == ADDR_EXPR)
        result = TREE_OPERAND (operand, 0);

      /* Otherwise, build and fold the indirect reference.  */
      else
        {
          result = build_fold_indirect_ref (operand);
          TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
        }

      if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
        {
          TREE_SIDE_EFFECTS (result) = 1;
          if (TREE_CODE (result) == INDIRECT_REF)
            TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
        }
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */

        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most machines.  */
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   boolean_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to
                   the modulus minus one.  For a binary modulus, we
                   XOR against that constant; for a nonbinary modulus,
                   we subtract the operand from it.  */

                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
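
/* Editorial note (not part of the original source): the SAVE_EXPR hoisting
   in build_cond_expr above can be pictured as rewriting, in C-like
   pseudocode with hypothetical names,

     cond ? f (save = e) : g (save)

   into

     (save = e, cond ? f (save) : g (save))

   so that the saved value is computed exactly once, on every path, before
   either arm reads it.  */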

/* Similar, but for COMPOUND_EXPR.  */

tree
build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
{
  bool addr_p = false;
  tree result;

  /* If the result type is unconstrained, take the address of the operand and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
      addr_p = true;
    }

  result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
                        expr_operand);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
\f
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

tree
build_call_n_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  return fn;
}
\f
/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_n_expr (gnu_local_raise, 1,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 2,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}

/* Similar to build_call_raise, for an index or range check exception as
   determined by MSG, with extra information of the form
   "INDEX out of range FIRST..LAST".  */

tree
build_call_raise_range (int msg, Node_Id gnat_node,
                        tree index, tree first, tree last)
{
  tree fndecl = gnat_raise_decls_ext[msg];
  tree filename;
  int line_number, column_number;
  const char *str;
  int len;

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
    {
      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
      column_number = Get_Column_Number (Sloc (gnat_node));
    }
  else
    {
      line_number = input_line;
      column_number = 0;
    }

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 6,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number),
                       build_int_cst (NULL_TREE, column_number),
                       convert (integer_type_node, index),
                       convert (integer_type_node, first),
                       convert (integer_type_node, last));
}
1622
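/* For illustration only: the six-argument form built above gives the runtime
   what it needs to report messages such as

     index 10 out of range 1..5

   by combining the INDEX, FIRST and LAST operands with the source location.  */
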
1623 /* Similar to build_call_raise, with extra information about the column
1624 where the check failed. */
1625
1626 tree
1627 build_call_raise_column (int msg, Node_Id gnat_node)
1628 {
1629 tree fndecl = gnat_raise_decls_ext[msg];
1630 tree filename;
1631 int line_number, column_number;
1632 const char *str;
1633 int len;
1634
1635 str
1636 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1637 ? ""
1638 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1639 ? IDENTIFIER_POINTER
1640 (get_identifier (Get_Name_String
1641 (Debug_Source_Name
1642 (Get_Source_File_Index (Sloc (gnat_node))))))
1643 : ref_filename;
1644
1645 len = strlen (str);
1646 filename = build_string (len, str);
1647 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1648 {
1649 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1650 column_number = Get_Column_Number (Sloc (gnat_node));
1651 }
1652 else
1653 {
1654 line_number = input_line;
1655 column_number = 0;
1656 }
1657
1658 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1659 build_index_type (size_int (len)));
1660
1661 return
1662 build_call_n_expr (fndecl, 3,
1663 build1 (ADDR_EXPR,
1664 build_pointer_type (unsigned_char_type_node),
1665 filename),
1666 build_int_cst (NULL_TREE, line_number),
1667 build_int_cst (NULL_TREE, column_number));
1668 }
1669 \f
1670 /* qsort comparator for the bit positions of two constructor elements
1671 for record components. */
1672
1673 static int
1674 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1675 {
1676 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1677 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1678 const_tree const field1 = elmt1->index;
1679 const_tree const field2 = elmt2->index;
1680 const int ret
1681 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1682
1683 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1684 }
1685
1686 /* Return a CONSTRUCTOR of TYPE whose elements are the elements of V. */
1687
1688 tree
1689 gnat_build_constructor (tree type, VEC(constructor_elt,gc) *v)
1690 {
1691 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1692 bool side_effects = false;
1693 tree result, obj, val;
1694 unsigned int n_elmts;
1695
1696 /* Scan the elements to see if they are all constant or if any has side
1697 effects, to let us set global flags on the resulting constructor. Count
1698 the elements along the way for possible sorting purposes below. */
1699 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1700 {
1701 /* The predicate must be in keeping with output_constructor. */
1702 if (!TREE_CONSTANT (val)
1703 || (TREE_CODE (type) == RECORD_TYPE
1704 && CONSTRUCTOR_BITFIELD_P (obj)
1705 && !initializer_constant_valid_for_bitfield_p (val))
1706 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1707 allconstant = false;
1708
1709 if (TREE_SIDE_EFFECTS (val))
1710 side_effects = true;
1711 }
1712
1713 /* For record types with constant components only, sort field list
1714 by increasing bit position. This is necessary to ensure the
1715 constructor can be output as static data. */
1716 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1717 VEC_qsort (constructor_elt, v, compare_elmt_bitpos);
1718
1719 result = build_constructor (type, v);
1720 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1721 TREE_SIDE_EFFECTS (result) = side_effects;
1722 TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;
1723 return result;
1724 }
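
/* For instance (purely illustrative), with a representation clause that
   places the last declared component at the lowest bit position, the
   elements of V arrive out of bit order; output_constructor requires
   increasing bit positions to emit the data statically, hence the sort.  */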
1725 \f
1726 /* Return a COMPONENT_REF to access the field given either by COMPONENT,
1727 an IDENTIFIER_NODE naming the field, or by FIELD, a FIELD_DECL for it.
1728 Don't fold the result if NO_FOLD_P is true.
1729
1730 We also handle the fact that we might have been passed a pointer to the
1731 actual record and know how to look for fields in variant parts. */
1732
1733 static tree
1734 build_simple_component_ref (tree record_variable, tree component,
1735 tree field, bool no_fold_p)
1736 {
1737 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1738 tree ref, inner_variable;
1739
1740 gcc_assert ((TREE_CODE (record_type) == RECORD_TYPE
1741 || TREE_CODE (record_type) == UNION_TYPE
1742 || TREE_CODE (record_type) == QUAL_UNION_TYPE)
1743 && TYPE_SIZE (record_type)
1744 && (component != 0) != (field != 0));
1745
1746 /* If no field was specified, look for a field with the specified name
1747 in the current record only. */
1748 if (!field)
1749 for (field = TYPE_FIELDS (record_type); field;
1750 field = TREE_CHAIN (field))
1751 if (DECL_NAME (field) == component)
1752 break;
1753
1754 if (!field)
1755 return NULL_TREE;
1756
1757 /* If this field is not in the specified record, see if we can find a field
1758 in the specified record whose original field is the same as this one. */
1759 if (DECL_CONTEXT (field) != record_type)
1760 {
1761 tree new_field;
1762
1763 /* First loop through the normal components. */
1764 for (new_field = TYPE_FIELDS (record_type); new_field;
1765 new_field = DECL_CHAIN (new_field))
1766 if (SAME_FIELD_P (field, new_field))
1767 break;
1768
1769 /* Next, see if we're looking for an inherited component in an extension.
1770 If so, look through the extension directly. */
1771 if (!new_field
1772 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1773 && TYPE_ALIGN_OK (record_type)
1774 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1775 == RECORD_TYPE
1776 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1777 {
1778 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1779 NULL_TREE, field, no_fold_p);
1780 if (ref)
1781 return ref;
1782 }
1783
1784 /* Next, loop through the DECL_INTERNAL_P components if we haven't
1785 found the component in the first search. Doing the search in two
1786 steps is required to avoid hidden homonymous fields in the
1787 _Parent field. */
1788 if (!new_field)
1789 for (new_field = TYPE_FIELDS (record_type); new_field;
1790 new_field = DECL_CHAIN (new_field))
1791 if (DECL_INTERNAL_P (new_field))
1792 {
1793 tree field_ref
1794 = build_simple_component_ref (record_variable,
1795 NULL_TREE, new_field, no_fold_p);
1796 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1797 no_fold_p);
1798
1799 if (ref)
1800 return ref;
1801 }
1802
1803 field = new_field;
1804 }
1805
1806 if (!field)
1807 return NULL_TREE;
1808
1809 /* If the field's offset has overflowed, do not attempt to access it
1810 as doing so may trigger sanity checks deeper in the back-end.
1811 Note that we don't need to warn since this will be done on trying
1812 to declare the object. */
1813 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1814 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1815 return NULL_TREE;
1816
1817 /* Look through conversions between type variants. Note that this
1818 is transparent as far as the field is concerned. */
1819 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1820 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1821 == record_type)
1822 inner_variable = TREE_OPERAND (record_variable, 0);
1823 else
1824 inner_variable = record_variable;
1825
1826 ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
1827 NULL_TREE);
1828
1829 if (TREE_READONLY (record_variable) || TREE_READONLY (field))
1830 TREE_READONLY (ref) = 1;
1831 if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
1832 || TYPE_VOLATILE (record_type))
1833 TREE_THIS_VOLATILE (ref) = 1;
1834
1835 if (no_fold_p)
1836 return ref;
1837
1838 /* The generic folder may punt in this case because the inner array type
1839 can be self-referential, but folding is in fact not problematic. */
1840 else if (TREE_CODE (record_variable) == CONSTRUCTOR
1841 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
1842 {
1843 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
1844 unsigned HOST_WIDE_INT idx;
1845 tree index, value;
1846 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
1847 if (index == field)
1848 return value;
1849 return ref;
1850 }
1851
1852 else
1853 return fold (ref);
1854 }
1855 \f
1856 /* Like build_simple_component_ref, except that we give an error if the
1857 reference could not be found. */
1858
1859 tree
1860 build_component_ref (tree record_variable, tree component,
1861 tree field, bool no_fold_p)
1862 {
1863 tree ref = build_simple_component_ref (record_variable, component, field,
1864 no_fold_p);
1865
1866 if (ref)
1867 return ref;
1868
1869 /* If FIELD was specified, assume this is an invalid user field so raise
1870 Constraint_Error. Otherwise, we have no type to return so abort. */
1871 gcc_assert (field);
1872 return build1 (NULL_EXPR, TREE_TYPE (field),
1873 build_call_raise (CE_Discriminant_Check_Failed, Empty,
1874 N_Raise_Constraint_Error));
1875 }
1876 \f
1877 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
1878 identically. Process the case where a GNAT_PROC to call is provided. */
1879
1880 static inline tree
1881 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
1882 Entity_Id gnat_proc, Entity_Id gnat_pool)
1883 {
1884 tree gnu_proc = gnat_to_gnu (gnat_proc);
1885 tree gnu_call;
1886
1887 /* The storage pools are obviously always tagged types, but the
1888 secondary stack uses the same mechanism and is not tagged. */
1889 if (Is_Tagged_Type (Etype (gnat_pool)))
1890 {
1891 /* The size is the third parameter; the alignment parameter
1892 has the same type. */
1893 Entity_Id gnat_size_type
1894 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
1895 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1896
1897 tree gnu_pool = gnat_to_gnu (gnat_pool);
1898 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
1899 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
1900
1901 gnu_size = convert (gnu_size_type, gnu_size);
1902 gnu_align = convert (gnu_size_type, gnu_align);
1903
1904 /* The first arg is always the address of the storage pool; next
1905 comes the address of the object, for a deallocator, then the
1906 size and alignment. */
1907 if (gnu_obj)
1908 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
1909 gnu_size, gnu_align);
1910 else
1911 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
1912 gnu_size, gnu_align);
1913 }
1914
1915 /* Secondary stack case. */
1916 else
1917 {
1918 /* The size is the second parameter. */
1919 Entity_Id gnat_size_type
1920 = Etype (Next_Formal (First_Formal (gnat_proc)));
1921 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
1922
1923 gnu_size = convert (gnu_size_type, gnu_size);
1924
1925 /* For a deallocator, the first arg is the address of the object,
1926 then comes the size. */
1927 if (gnu_obj)
1928 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
1929 else
1930 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
1931 }
1932
1933 return gnu_call;
1934 }
1935
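/* Illustrative shapes of the calls built above, in pseudo-C:

     proc (&pool, size, align)         for an allocator,
     proc (&pool, obj, size, align)    for a deallocator,

   while the secondary stack variant omits the pool address and the
   alignment, passing only the size (and the object for a deallocator).  */
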
1936 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
1937 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
1938 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
1939 latter offers. */
1940
1941 static inline tree
1942 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
1943 {
1944 /* When the DATA_TYPE alignment is stricter than what malloc offers
1945 (super-aligned case), we allocate an "aligning" wrapper type and return
1946 the address of its single data field with the malloc's return value
1947 stored just in front. */
1948
1949 unsigned int data_align = TYPE_ALIGN (data_type);
1950 unsigned int system_allocator_alignment
1951 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
1952
1953 tree aligning_type
1954 = ((data_align > system_allocator_alignment)
1955 ? make_aligning_type (data_type, data_align, data_size,
1956 system_allocator_alignment,
1957 POINTER_SIZE / BITS_PER_UNIT)
1958 : NULL_TREE);
1959
1960 tree size_to_malloc
1961 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
1962
1963 tree malloc_ptr;
1964
1965 /* On VMS, if pointers are 64-bit and the allocator size is 32-bit or
1966 Convention C, allocate 32-bit memory. */
1967 if (TARGET_ABI_OPEN_VMS
1968 && POINTER_SIZE == 64
1969 && Nkind (gnat_node) == N_Allocator
1970 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
1971 || Convention (Etype (gnat_node)) == Convention_C))
1972 malloc_ptr = build_call_n_expr (malloc32_decl, 1, size_to_malloc);
1973 else
1974 malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
1975
1976 if (aligning_type)
1977 {
1978 /* Latch malloc's return value and get a pointer to the aligning field
1979 first. */
1980 tree storage_ptr = gnat_protect_expr (malloc_ptr);
1981
1982 tree aligning_record_addr
1983 = convert (build_pointer_type (aligning_type), storage_ptr);
1984
1985 tree aligning_record
1986 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
1987
1988 tree aligning_field
1989 = build_component_ref (aligning_record, NULL_TREE,
1990 TYPE_FIELDS (aligning_type), false);
1991
1992 tree aligning_field_addr
1993 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
1994
1995 /* Then arrange to store the allocator's return value ahead
1996 and return. */
1997 tree storage_ptr_slot_addr
1998 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1999 convert (ptr_void_type_node, aligning_field_addr),
2000 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2001 / BITS_PER_UNIT));
2002
2003 tree storage_ptr_slot
2004 = build_unary_op (INDIRECT_REF, NULL_TREE,
2005 convert (build_pointer_type (ptr_void_type_node),
2006 storage_ptr_slot_addr));
2007
2008 return
2009 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2010 build_binary_op (MODIFY_EXPR, NULL_TREE,
2011 storage_ptr_slot, storage_ptr),
2012 aligning_field_addr);
2013 }
2014 else
2015 return malloc_ptr;
2016 }
2017
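/* Illustrative layout of the super-aligned case handled above:

     storage_ptr ->  +--------------------+
                     :  padding           :
                     |  storage_ptr copy  |   POINTER_SIZE bytes in front
     result      ->  +--------------------+
                     |  data              |   aligned on DATA_ALIGN
                     +--------------------+

   maybe_wrap_free below fetches the initial malloc return value from the
   slot just in front of the data in order to release the whole block.  */
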
2018 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2019 designated by DATA_PTR using the __gnat_free entry point. */
2020
2021 static inline tree
2022 maybe_wrap_free (tree data_ptr, tree data_type)
2023 {
2024 /* In the regular alignment case, we pass the data pointer straight to free.
2025 In the superaligned case, we need to retrieve the initial allocator
2026 return value, stored in front of the data block at allocation time. */
2027
2028 unsigned int data_align = TYPE_ALIGN (data_type);
2029 unsigned int system_allocator_alignment
2030 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2031
2032 tree free_ptr;
2033
2034 if (data_align > system_allocator_alignment)
2035 {
2036 /* DATA_FRONT_PTR (void *)
2037 = (void *)DATA_PTR - sizeof (void *) */
2038 tree data_front_ptr
2039 = build_binary_op
2040 (POINTER_PLUS_EXPR, ptr_void_type_node,
2041 convert (ptr_void_type_node, data_ptr),
2042 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2043
2044 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2045 free_ptr
2046 = build_unary_op
2047 (INDIRECT_REF, NULL_TREE,
2048 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2049 }
2050 else
2051 free_ptr = data_ptr;
2052
2053 return build_call_n_expr (free_decl, 1, free_ptr);
2054 }
2055
2056 /* Build a GCC tree to call an allocation or deallocation function.
2057 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2058 generate an allocator.
2059
2060 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2061 object type, used to determine the to-be-honored address alignment.
2062 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2063 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2064 to provide an error location for restriction violation messages. */
2065
2066 tree
2067 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2068 Entity_Id gnat_proc, Entity_Id gnat_pool,
2069 Node_Id gnat_node)
2070 {
2071 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2072
2073 /* Explicit proc to call? This one is assumed to deal with the type
2074 alignment constraints. */
2075 if (Present (gnat_proc))
2076 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2077 gnat_proc, gnat_pool);
2078
2079 /* Otherwise, object to "free" or "malloc" with possible special processing
2080 for alignments stricter than what the default allocator honors. */
2081 else if (gnu_obj)
2082 return maybe_wrap_free (gnu_obj, gnu_type);
2083 else
2084 {
2085 /* Assert that we no longer can be called with this special pool. */
2086 gcc_assert (gnat_pool != -1);
2087
2088 /* Check that we aren't violating the associated restriction. */
2089 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2090 Check_No_Implicit_Heap_Alloc (gnat_node);
2091
2092 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2093 }
2094 }
2095 \f
2096 /* Build a GCC tree to correspond to allocating an object of TYPE whose
2097 initial value is INIT, if INIT is nonzero. Convert the expression to
2098 RESULT_TYPE, which must be some type of pointer. Return the tree.
2099
2100 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2101 the storage pool to use. GNAT_NODE is used to provide an error
2102 location for restriction violation messages. If IGNORE_INIT_TYPE is
2103 true, ignore the type of INIT for the purpose of determining the size;
2104 this will cause the maximum size to be allocated if TYPE is of
2105 self-referential size. */
2106
2107 tree
2108 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2109 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2110 {
2111 tree size = TYPE_SIZE_UNIT (type);
2112 tree result;
2113
2114 /* If INIT is present and is a NULL_EXPR, just return a new one. */
2115 if (init && TREE_CODE (init) == NULL_EXPR)
2116 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2117
2118 /* If INIT is present and is a COND_EXPR, deal with each branch. */
2119 else if (init && TREE_CODE (init) == COND_EXPR)
2120 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2121 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2122 gnat_proc, gnat_pool, gnat_node,
2123 ignore_init_type),
2124 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2125 gnat_proc, gnat_pool, gnat_node,
2126 ignore_init_type));
2127
2128 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2129 sizes of the object and its template. Allocate the whole thing and
2130 fill in the parts that are known. */
2131 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2132 {
2133 tree storage_type
2134 = build_unc_object_type_from_ptr (result_type, type,
2135 get_identifier ("ALLOC"), false);
2136 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2137 tree storage_ptr_type = build_pointer_type (storage_type);
2138 tree storage;
2139
2140 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2141 init);
2142
2143 /* If the size overflows, pass -1 so the allocator will raise
2144 Storage_Error. */
2145 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2146 size = ssize_int (-1);
2147
2148 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2149 gnat_proc, gnat_pool, gnat_node);
2150 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2151
2152 /* If there is an initializing expression, then make a constructor for
2153 the entire object including the bounds and copy it into the object.
2154 If there is no initializing expression, just set the bounds. */
2155 if (init)
2156 {
2157 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
2158
2159 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2160 build_template (template_type, type, init));
2161 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2162 init);
2163 return convert
2164 (result_type,
2165 build2 (COMPOUND_EXPR, storage_ptr_type,
2166 build_binary_op
2167 (MODIFY_EXPR, NULL_TREE,
2168 build_unary_op (INDIRECT_REF, NULL_TREE,
2169 convert (storage_ptr_type, storage)),
2170 gnat_build_constructor (storage_type, v)),
2171 convert (storage_ptr_type, storage)));
2172 }
2173 else
2174 return build2
2175 (COMPOUND_EXPR, result_type,
2176 build_binary_op
2177 (MODIFY_EXPR, NULL_TREE,
2178 build_component_ref
2179 (build_unary_op (INDIRECT_REF, NULL_TREE,
2180 convert (storage_ptr_type, storage)),
2181 NULL_TREE, TYPE_FIELDS (storage_type), false),
2182 build_template (template_type, type, NULL_TREE)),
2183 convert (result_type, convert (storage_ptr_type, storage)));
2184 }
2185
2186 /* If we have an initializing expression, see if its size is simpler
2187 than the size from the type. */
2188 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2189 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2190 || CONTAINS_PLACEHOLDER_P (size)))
2191 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2192
2193 /* If the size is still self-referential, reference the initializing
2194 expression, if it is present. If not, this must have been a
2195 call to allocate a library-level object, in which case we use
2196 the maximum size. */
2197 if (CONTAINS_PLACEHOLDER_P (size))
2198 {
2199 if (!ignore_init_type && init)
2200 size = substitute_placeholder_in_expr (size, init);
2201 else
2202 size = max_size (size, true);
2203 }
2204
2205 /* If the size overflows, pass -1 so the allocator will raise
2206 Storage_Error. */
2207 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
2208 size = ssize_int (-1);
2209
2210 result = convert (result_type,
2211 build_call_alloc_dealloc (NULL_TREE, size, type,
2212 gnat_proc, gnat_pool,
2213 gnat_node));
2214
2215 /* If we have an initial value, protect the new address, assign the value
2216 and return the address with a COMPOUND_EXPR. */
2217 if (init)
2218 {
2219 result = gnat_protect_expr (result);
2220 result
2221 = build2 (COMPOUND_EXPR, TREE_TYPE (result),
2222 build_binary_op
2223 (MODIFY_EXPR, NULL_TREE,
2224 build_unary_op (INDIRECT_REF,
2225 TREE_TYPE (TREE_TYPE (result)), result),
2226 init),
2227 result);
2228 }
2229
2230 return convert (result_type, result);
2231 }
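
/* Rough sketch of the tree built above for an initialized allocator whose
   result is a fat or thin pointer, in pseudo-C:

     (result_type) (*(storage_type *) storage = { template, init },
                    (storage_ptr_type) storage)

   i.e. a COMPOUND_EXPR that stores the bounds template along with the data
   and then yields the converted storage pointer.  */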
2232 \f
2233 /* Indicate that we need to take the address of T and that it therefore
2234 should not be allocated in a register. Returns true if successful. */
2235
2236 bool
2237 gnat_mark_addressable (tree t)
2238 {
2239 while (true)
2240 switch (TREE_CODE (t))
2241 {
2242 case ADDR_EXPR:
2243 case COMPONENT_REF:
2244 case ARRAY_REF:
2245 case ARRAY_RANGE_REF:
2246 case REALPART_EXPR:
2247 case IMAGPART_EXPR:
2248 case VIEW_CONVERT_EXPR:
2249 case NON_LVALUE_EXPR:
2250 CASE_CONVERT:
2251 t = TREE_OPERAND (t, 0);
2252 break;
2253
2254 case COMPOUND_EXPR:
2255 t = TREE_OPERAND (t, 1);
2256 break;
2257
2258 case CONSTRUCTOR:
2259 TREE_ADDRESSABLE (t) = 1;
2260 return true;
2261
2262 case VAR_DECL:
2263 case PARM_DECL:
2264 case RESULT_DECL:
2265 TREE_ADDRESSABLE (t) = 1;
2266 return true;
2267
2268 case FUNCTION_DECL:
2269 TREE_ADDRESSABLE (t) = 1;
2270 return true;
2271
2272 case CONST_DECL:
2273 return DECL_CONST_CORRESPONDING_VAR (t)
2274 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2275
2276 default:
2277 return true;
2278 }
2279 }
2280 \f
2281 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2282 but we know how to handle our own nodes. */
2283
2284 tree
2285 gnat_save_expr (tree exp)
2286 {
2287 tree type = TREE_TYPE (exp);
2288 enum tree_code code = TREE_CODE (exp);
2289
2290 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2291 return exp;
2292
2293 if (code == UNCONSTRAINED_ARRAY_REF)
2294 {
2295 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2296 TREE_READONLY (t) = TYPE_READONLY (type);
2297 return t;
2298 }
2299
2300 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2301 This may be more efficient, but will also allow us to more easily find
2302 the match for the PLACEHOLDER_EXPR. */
2303 if (code == COMPONENT_REF
2304 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2305 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2306 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2307
2308 return save_expr (exp);
2309 }
2310
2311 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2312 is optimized under the assumption that EXP's value doesn't change before
2313 its subsequent reuse(s) except through its potential reevaluation. */
2314
2315 tree
2316 gnat_protect_expr (tree exp)
2317 {
2318 tree type = TREE_TYPE (exp);
2319 enum tree_code code = TREE_CODE (exp);
2320
2321 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2322 return exp;
2323
2324 /* If EXP has no side effects, we theoretically don't need to do anything.
2325 However, we may be recursively passed more and more complex expressions
2326 involving checks which will be reused multiple times and eventually be
2327 unshared for gimplification; in order to avoid a complexity explosion
2328 at that point, we protect any expressions more complex than a simple
2329 arithmetic expression. */
2330 if (!TREE_SIDE_EFFECTS (exp))
2331 {
2332 tree inner = skip_simple_arithmetic (exp);
2333 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2334 return exp;
2335 }
2336
2337 /* If this is a conversion, protect what's inside the conversion. */
2338 if (code == NON_LVALUE_EXPR
2339 || CONVERT_EXPR_CODE_P (code)
2340 || code == VIEW_CONVERT_EXPR)
2341 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2342
2343 /* If we're indirectly referencing something, we only need to protect the
2344 address since the data itself can't change in these situations. */
2345 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2346 {
2347 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2348 TREE_READONLY (t) = TYPE_READONLY (type);
2349 return t;
2350 }
2351
2352 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2353 This may be more efficient, but will also allow us to more easily find
2354 the match for the PLACEHOLDER_EXPR. */
2355 if (code == COMPONENT_REF
2356 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2357 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2358 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2359
2360 /* If this is a fat pointer or something that can be placed in a register,
2361 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2362 returned via invisible reference in most ABIs so the temporary will
2363 directly be filled by the callee. */
2364 if (TYPE_IS_FAT_POINTER_P (type)
2365 || TYPE_MODE (type) != BLKmode
2366 || code == CALL_EXPR)
2367 return save_expr (exp);
2368
2369 /* Otherwise, take the address of EXP, protect it and dereference. */
2370 return
2371 build_unary_op (INDIRECT_REF, type,
2372 save_expr (build_unary_op (ADDR_EXPR,
2373 build_reference_type (type),
2374 exp)));
2375 }
2376
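/* In the fallback case above, protecting a BLKmode reference such as A(I).B
   thus yields (illustration only)

     *SAVE_EXPR <&A(I).B>

   so the address computation, including any checks it contains, is
   evaluated exactly once.  */
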
2377 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2378 argument to force evaluation of everything. */
2379
2380 static tree
2381 gnat_stabilize_reference_1 (tree e, bool force)
2382 {
2383 enum tree_code code = TREE_CODE (e);
2384 tree type = TREE_TYPE (e);
2385 tree result;
2386
2387 /* We cannot ignore const expressions because E might be a reference
2388 to a const array whose index contains side effects. But we can
2389 ignore things that are actual constants or that have already been
2390 handled by this function. */
2391 if (TREE_CONSTANT (e) || code == SAVE_EXPR)
2392 return e;
2393
2394 switch (TREE_CODE_CLASS (code))
2395 {
2396 case tcc_exceptional:
2397 case tcc_declaration:
2398 case tcc_comparison:
2399 case tcc_expression:
2400 case tcc_reference:
2401 case tcc_vl_exp:
2402 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2403 fat pointer. This may be more efficient, but will also allow
2404 us to more easily find the match for the PLACEHOLDER_EXPR. */
2405 if (code == COMPONENT_REF
2406 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2407 result
2408 = build3 (code, type,
2409 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2410 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2411 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2412 so that it will only be evaluated once. */
2413 /* The tcc_reference and tcc_comparison classes could be handled as
2414 below, but it is generally faster to only evaluate them once. */
2415 else if (TREE_SIDE_EFFECTS (e) || force)
2416 return save_expr (e);
2417 else
2418 return e;
2419 break;
2420
2421 case tcc_binary:
2422 /* Recursively stabilize each operand. */
2423 result
2424 = build2 (code, type,
2425 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2426 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
2427 break;
2428
2429 case tcc_unary:
2430 /* Recursively stabilize each operand. */
2431 result
2432 = build1 (code, type,
2433 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
2434 break;
2435
2436 default:
2437 gcc_unreachable ();
2438 }
2439
2440 /* See similar handling in gnat_stabilize_reference. */
2441 TREE_READONLY (result) = TREE_READONLY (e);
2442 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2443 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2444
2445 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
2446 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
2447
2448 return result;
2449 }
2450
2451 /* This is equivalent to stabilize_reference in tree.c but we know how to
2452 handle our own nodes and we take extra arguments. FORCE says whether to
2453 force evaluation of everything. We set SUCCESS to true unless we walk
2454 through something we don't know how to stabilize. */
2455
2456 tree
2457 gnat_stabilize_reference (tree ref, bool force, bool *success)
2458 {
2459 tree type = TREE_TYPE (ref);
2460 enum tree_code code = TREE_CODE (ref);
2461 tree result;
2462
2463 /* Assume we'll succeed unless proven otherwise. */
2464 if (success)
2465 *success = true;
2466
2467 switch (code)
2468 {
2469 case CONST_DECL:
2470 case VAR_DECL:
2471 case PARM_DECL:
2472 case RESULT_DECL:
2473 /* No action is needed in this case. */
2474 return ref;
2475
2476 case ADDR_EXPR:
2477 CASE_CONVERT:
2478 case FLOAT_EXPR:
2479 case FIX_TRUNC_EXPR:
2480 case VIEW_CONVERT_EXPR:
2481 result
2482 = build1 (code, type,
2483 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2484 success));
2485 break;
2486
2487 case INDIRECT_REF:
2488 case UNCONSTRAINED_ARRAY_REF:
2489 result = build1 (code, type,
2490 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
2491 force));
2492 break;
2493
2494 case COMPONENT_REF:
2495 result = build3 (COMPONENT_REF, type,
2496 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2497 success),
2498 TREE_OPERAND (ref, 1), NULL_TREE);
2499 break;
2500
2501 case BIT_FIELD_REF:
2502 result = build3 (BIT_FIELD_REF, type,
2503 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2504 success),
2505 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2506 force),
2507 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
2508 force));
2509 break;
2510
2511 case ARRAY_REF:
2512 case ARRAY_RANGE_REF:
2513 result = build4 (code, type,
2514 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2515 success),
2516 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2517 force),
2518 NULL_TREE, NULL_TREE);
2519 break;
2520
2521 case CALL_EXPR:
2522 result = gnat_stabilize_reference_1 (ref, force);
2523 break;
2524
2525 case COMPOUND_EXPR:
2526 result = build2 (COMPOUND_EXPR, type,
2527 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2528 success),
2529 gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
2530 success));
2531 break;
2532
2533 case CONSTRUCTOR:
2534 /* Constructors with 1 element are used extensively to formally
2535 convert objects to special wrapping types. */
2536 if (TREE_CODE (type) == RECORD_TYPE
2537 && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
2538 {
2539 tree index
2540 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
2541 tree value
2542 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
2543 result
2544 = build_constructor_single (type, index,
2545 gnat_stabilize_reference_1 (value,
2546 force));
2547 }
2548 else
2549 {
2550 if (success)
2551 *success = false;
2552 return ref;
2553 }
2554 break;
2555
2556 case ERROR_MARK:
2557 ref = error_mark_node;
2558
2559 /* ... fall through to failure ... */
2560
2561 /* If arg isn't a kind of lvalue we recognize, make no change.
2562 Caller should recognize the error for an invalid lvalue. */
2563 default:
2564 if (success)
2565 *success = false;
2566 return ref;
2567 }
2568
2569 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2570 may not be sustained across some paths, such as the way via build1 for
2571 INDIRECT_REF. We reset those flags here in the general case, which is
2572 consistent with the GCC version of this routine.
2573
2574 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2575 paths introduce side-effects where there was none initially (e.g. if a
2576 SAVE_EXPR is built) and we also want to keep track of that. */
2577 TREE_READONLY (result) = TREE_READONLY (ref);
2578 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2579 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2580
2581 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
2582 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2583
2584 return result;
2585 }