1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2014, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "stor-layout.h"
32 #include "stringpool.h"
33 #include "varasm.h"
34 #include "flags.h"
35 #include "toplev.h"
36 #include "ggc.h"
37 #include "tree-inline.h"
38
39 #include "ada.h"
40 #include "types.h"
41 #include "atree.h"
42 #include "elists.h"
43 #include "namet.h"
44 #include "nlists.h"
45 #include "snames.h"
46 #include "stringt.h"
47 #include "uintp.h"
48 #include "fe.h"
49 #include "sinfo.h"
50 #include "einfo.h"
51 #include "ada-tree.h"
52 #include "gigi.h"
53
54 /* Return the base type of TYPE. */
55
56 tree
57 get_base_type (tree type)
58 {
59 if (TREE_CODE (type) == RECORD_TYPE
60 && TYPE_JUSTIFIED_MODULAR_P (type))
61 type = TREE_TYPE (TYPE_FIELDS (type));
62
63 while (TREE_TYPE (type)
64 && (TREE_CODE (type) == INTEGER_TYPE
65 || TREE_CODE (type) == REAL_TYPE))
66 type = TREE_TYPE (type);
67
68 return type;
69 }
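/* For example, given a tree built for an Ada declaration such as

     subtype Small is Integer range 1 .. 10;

   the INTEGER_TYPE node for Small points to the type of Integer through
   its TREE_TYPE field, so calling get_base_type on it peels the subtype
   and returns the base type of Integer.  */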
70 \f
71 /* EXP is a GCC tree representing an address. See if we can find how
72 strictly the object at that address is aligned. Return that alignment
73 in bits. If we don't know anything about the alignment, return 0. */
74
75 unsigned int
76 known_alignment (tree exp)
77 {
78 unsigned int this_alignment;
79 unsigned int lhs, rhs;
80
81 switch (TREE_CODE (exp))
82 {
83 CASE_CONVERT:
84 case VIEW_CONVERT_EXPR:
85 case NON_LVALUE_EXPR:
86 /* Conversions between pointers and integers don't change the alignment
87 of the underlying object. */
88 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
89 break;
90
91 case COMPOUND_EXPR:
  92       /* The value of a COMPOUND_EXPR is that of its second operand.  */
93 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
94 break;
95
96 case PLUS_EXPR:
97 case MINUS_EXPR:
  98       /* If two addresses are added, the alignment of the result is the
99 minimum of the two alignments. */
100 lhs = known_alignment (TREE_OPERAND (exp, 0));
101 rhs = known_alignment (TREE_OPERAND (exp, 1));
102 this_alignment = MIN (lhs, rhs);
103 break;
104
105 case POINTER_PLUS_EXPR:
106 lhs = known_alignment (TREE_OPERAND (exp, 0));
107 rhs = known_alignment (TREE_OPERAND (exp, 1));
108 /* If we don't know the alignment of the offset, we assume that
109 of the base. */
110 if (rhs == 0)
111 this_alignment = lhs;
112 else
113 this_alignment = MIN (lhs, rhs);
114 break;
115
116 case COND_EXPR:
117 /* If there is a choice between two values, use the smallest one. */
118 lhs = known_alignment (TREE_OPERAND (exp, 1));
119 rhs = known_alignment (TREE_OPERAND (exp, 2));
120 this_alignment = MIN (lhs, rhs);
121 break;
122
123 case INTEGER_CST:
124 {
125 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
 126         /* c & -c isolates the lowest set bit of the constant, which bounds
 127            the alignment, but the constant is in bytes, not bits.  */
128 this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
129 }
130 break;
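      /* For example, for a byte offset of 24 (binary 11000), c & -c yields 8,
         the lowest set bit, so the deduced alignment is 8 * BITS_PER_UNIT,
         i.e. 64 bits with 8-bit units; larger powers of two are capped at
         BIGGEST_ALIGNMENT by the MIN above.  */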
131
132 case MULT_EXPR:
133 /* If we know the alignment of just one side, use it. Otherwise,
134 use the product of the alignments. */
135 lhs = known_alignment (TREE_OPERAND (exp, 0));
136 rhs = known_alignment (TREE_OPERAND (exp, 1));
137
138 if (lhs == 0)
139 this_alignment = rhs;
140 else if (rhs == 0)
141 this_alignment = lhs;
142 else
143 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
144 break;
145
146 case BIT_AND_EXPR:
147 /* A bit-and expression is as aligned as the maximum alignment of the
148 operands. We typically get here for a complex lhs and a constant
149 negative power of two on the rhs to force an explicit alignment, so
150 don't bother looking at the lhs. */
151 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
152 break;
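      /* For example, in (addr & -16), the constant -16 has its lowest set bit
         at 16, so the result of the AND is a multiple of 16 bytes whatever
         the alignment of addr; this is the idiom used to force alignment.  */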
153
154 case ADDR_EXPR:
155 this_alignment = expr_align (TREE_OPERAND (exp, 0));
156 break;
157
158 case CALL_EXPR:
159 {
160 tree t = maybe_inline_call_in_expr (exp);
161 if (t)
162 return known_alignment (t);
163 }
164
165 /* Fall through... */
166
167 default:
168 /* For other pointer expressions, we assume that the pointed-to object
169 is at least as aligned as the pointed-to type. Beware that we can
170 have a dummy type here (e.g. a Taft Amendment type), for which the
171 alignment is meaningless and should be ignored. */
172 if (POINTER_TYPE_P (TREE_TYPE (exp))
173 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
174 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
175 else
176 this_alignment = 0;
177 break;
178 }
179
180 return this_alignment;
181 }
182 \f
183 /* We have a comparison or assignment operation on two types, T1 and T2, which
184 are either both array types or both record types. T1 is assumed to be for
185 the left hand side operand, and T2 for the right hand side. Return the
186 type that both operands should be converted to for the operation, if any.
187 Otherwise return zero. */
188
189 static tree
190 find_common_type (tree t1, tree t2)
191 {
192 /* ??? As of today, various constructs lead to here with types of different
193 sizes even when both constants (e.g. tagged types, packable vs regular
194 component types, padded vs unpadded types, ...). While some of these
195 would better be handled upstream (types should be made consistent before
196 calling into build_binary_op), some others are really expected and we
197 have to be careful. */
198
 199   /* If this is for an assignment, we must avoid writing more than what the
 200      target can hold (the case of tagged types is handled in build_binary_op),
 201      so we use the lhs type if it is known to be smaller, or of constant size
 202      while the rhs type is not, whatever the modes.  We also force t1 when the
 203      constant sizes are equal, to minimize occurrences of view conversions on
 204      the lhs of an assignment, except for record types with a variant part on
 205      the lhs but not on the rhs, for which converting to t2 is simpler.  */
206 if (TREE_CONSTANT (TYPE_SIZE (t1))
207 && (!TREE_CONSTANT (TYPE_SIZE (t2))
208 || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
209 || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
210 && !(TREE_CODE (t1) == RECORD_TYPE
211 && TREE_CODE (t2) == RECORD_TYPE
212 && get_variant_part (t1) != NULL_TREE
213 && get_variant_part (t2) == NULL_TREE))))
214 return t1;
215
216 /* Otherwise, if the lhs type is non-BLKmode, use it. Note that we know
217 that we will not have any alignment problems since, if we did, the
218 non-BLKmode type could not have been used. */
219 if (TYPE_MODE (t1) != BLKmode)
220 return t1;
221
222 /* If the rhs type is of constant size, use it whatever the modes. At
223 this point it is known to be smaller, or of constant size and the
224 lhs type is not. */
225 if (TREE_CONSTANT (TYPE_SIZE (t2)))
226 return t2;
227
228 /* Otherwise, if the rhs type is non-BLKmode, use it. */
229 if (TYPE_MODE (t2) != BLKmode)
230 return t2;
231
232 /* In this case, both types have variable size and BLKmode. It's
233 probably best to leave the "type mismatch" because changing it
234 could cause a bad self-referential reference. */
235 return NULL_TREE;
236 }
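/* For example, for an assignment whose lhs type is a constrained record
   subtype with a constant 8-byte size and whose rhs type is the same record
   type with variable size, the first test above returns the lhs type:
   copying 8 bytes is safe, whereas using the rhs type could write more than
   the target can hold.  */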
237 \f
238 /* Return an expression tree representing an equality comparison of A1 and A2,
239 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
240
241 Two arrays are equal in one of two ways: (1) if both have zero length in
242 some dimension (not necessarily the same dimension) or (2) if the lengths
243 in each dimension are equal and the data is equal. We perform the length
244 tests in as efficient a manner as possible. */
245
246 static tree
247 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
248 {
249 tree result = convert (result_type, boolean_true_node);
250 tree a1_is_null = convert (result_type, boolean_false_node);
251 tree a2_is_null = convert (result_type, boolean_false_node);
252 tree t1 = TREE_TYPE (a1);
253 tree t2 = TREE_TYPE (a2);
254 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
255 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
256 bool length_zero_p = false;
257
258 /* If either operand has side-effects, they have to be evaluated only once
259 in spite of the multiple references to the operand in the comparison. */
260 if (a1_side_effects_p)
261 a1 = gnat_protect_expr (a1);
262
263 if (a2_side_effects_p)
264 a2 = gnat_protect_expr (a2);
265
266 /* Process each dimension separately and compare the lengths. If any
267 dimension has a length known to be zero, set LENGTH_ZERO_P to true
268 in order to suppress the comparison of the data at the end. */
269 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
270 {
271 tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
272 tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
273 tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
274 tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
275 tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
276 size_one_node);
277 tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
278 size_one_node);
279 tree comparison, this_a1_is_null, this_a2_is_null;
280
281 /* If the length of the first array is a constant, swap our operands
282 unless the length of the second array is the constant zero. */
283 if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
284 {
285 tree tem;
286 bool btem;
287
288 tem = a1, a1 = a2, a2 = tem;
289 tem = t1, t1 = t2, t2 = tem;
290 tem = lb1, lb1 = lb2, lb2 = tem;
291 tem = ub1, ub1 = ub2, ub2 = tem;
292 tem = length1, length1 = length2, length2 = tem;
293 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
294 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
295 a2_side_effects_p = btem;
296 }
297
298 /* If the length of the second array is the constant zero, we can just
299 use the original stored bounds for the first array and see whether
300 last < first holds. */
301 if (integer_zerop (length2))
302 {
303 length_zero_p = true;
304
305 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
306 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
307
308 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
309 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
310 if (EXPR_P (comparison))
311 SET_EXPR_LOCATION (comparison, loc);
312
313 this_a1_is_null = comparison;
314 this_a2_is_null = convert (result_type, boolean_true_node);
315 }
316
317 /* Otherwise, if the length is some other constant value, we know that
318 this dimension in the second array cannot be superflat, so we can
319 just use its length computed from the actual stored bounds. */
320 else if (TREE_CODE (length2) == INTEGER_CST)
321 {
322 tree bt;
323
324 ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
325 lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
326 /* Note that we know that UB2 and LB2 are constant and hence
327 cannot contain a PLACEHOLDER_EXPR. */
328 ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
329 lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
330 bt = get_base_type (TREE_TYPE (ub1));
331
332 comparison
333 = fold_build2_loc (loc, EQ_EXPR, result_type,
334 build_binary_op (MINUS_EXPR, bt, ub1, lb1),
335 build_binary_op (MINUS_EXPR, bt, ub2, lb2));
336 comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
337 if (EXPR_P (comparison))
338 SET_EXPR_LOCATION (comparison, loc);
339
340 this_a1_is_null
341 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
342
343 this_a2_is_null = convert (result_type, boolean_false_node);
344 }
345
346 /* Otherwise, compare the computed lengths. */
347 else
348 {
349 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
350 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
351
352 comparison
353 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
354
355 /* If the length expression is of the form (cond ? val : 0), assume
356 that cond is equivalent to (length != 0). That's guaranteed by
357 construction of the array types in gnat_to_gnu_entity. */
358 if (TREE_CODE (length1) == COND_EXPR
359 && integer_zerop (TREE_OPERAND (length1, 2)))
360 this_a1_is_null
361 = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
362 else
363 this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
364 length1, size_zero_node);
365
366 /* Likewise for the second array. */
367 if (TREE_CODE (length2) == COND_EXPR
368 && integer_zerop (TREE_OPERAND (length2, 2)))
369 this_a2_is_null
370 = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
371 else
372 this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
373 length2, size_zero_node);
374 }
375
376 /* Append expressions for this dimension to the final expressions. */
377 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
378 result, comparison);
379
380 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
381 this_a1_is_null, a1_is_null);
382
383 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
384 this_a2_is_null, a2_is_null);
385
386 t1 = TREE_TYPE (t1);
387 t2 = TREE_TYPE (t2);
388 }
389
390 /* Unless the length of some dimension is known to be zero, compare the
391 data in the array. */
392 if (!length_zero_p)
393 {
394 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
395 tree comparison;
396
397 if (type)
398 {
 399           a1 = convert (type, a1);
400 a2 = convert (type, a2);
401 }
402
403 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
404
405 result
406 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
407 }
408
409 /* The result is also true if both sizes are zero. */
410 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
411 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
412 a1_is_null, a2_is_null),
413 result);
414
415 /* If either operand has side-effects, they have to be evaluated before
 416      starting the comparison above since the place where they would otherwise
 417      be evaluated could be wrong.  */
418 if (a1_side_effects_p)
419 result = build2 (COMPOUND_EXPR, result_type, a1, result);
420
421 if (a2_side_effects_p)
422 result = build2 (COMPOUND_EXPR, result_type, a2, result);
423
424 return result;
425 }
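/* For a one-dimensional comparison A1 = A2 in which neither length is known
   at compile time, the expression built above therefore amounts to the
   equivalent of

     (length1 == length2 && A1 data == A2 data)
     || (length1 == 0 && length2 == 0)

   with A1 and A2 wrapped in SAVE_EXPRs if they have side-effects.  */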
426
427 /* Return an expression tree representing an equality comparison of P1 and P2,
428 two objects of fat pointer type. The result should be of type RESULT_TYPE.
429
430 Two fat pointers are equal in one of two ways: (1) if both have a null
 431    pointer to the array or (2) if they contain the same pair of pointers.
432 We perform the comparison in as efficient a manner as possible. */
433
434 static tree
435 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
436 {
437 tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
438 tree p1_array_is_null, p2_array_is_null;
439
440 /* If either operand has side-effects, they have to be evaluated only once
441 in spite of the multiple references to the operand in the comparison. */
442 p1 = gnat_protect_expr (p1);
443 p2 = gnat_protect_expr (p2);
444
445 /* The constant folder doesn't fold fat pointer types so we do it here. */
446 if (TREE_CODE (p1) == CONSTRUCTOR)
447 p1_array = (*CONSTRUCTOR_ELTS (p1))[0].value;
448 else
449 p1_array = build_component_ref (p1, NULL_TREE,
450 TYPE_FIELDS (TREE_TYPE (p1)), true);
451
452 p1_array_is_null
453 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
454 fold_convert_loc (loc, TREE_TYPE (p1_array),
455 null_pointer_node));
456
457 if (TREE_CODE (p2) == CONSTRUCTOR)
458 p2_array = (*CONSTRUCTOR_ELTS (p2))[0].value;
459 else
460 p2_array = build_component_ref (p2, NULL_TREE,
461 TYPE_FIELDS (TREE_TYPE (p2)), true);
462
463 p2_array_is_null
464 = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
465 fold_convert_loc (loc, TREE_TYPE (p2_array),
466 null_pointer_node));
467
468 /* If one of the pointers to the array is null, just compare the other. */
469 if (integer_zerop (p1_array))
470 return p2_array_is_null;
471 else if (integer_zerop (p2_array))
472 return p1_array_is_null;
473
474 /* Otherwise, do the fully-fledged comparison. */
475 same_array
476 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
477
478 if (TREE_CODE (p1) == CONSTRUCTOR)
479 p1_bounds = (*CONSTRUCTOR_ELTS (p1))[1].value;
480 else
481 p1_bounds
482 = build_component_ref (p1, NULL_TREE,
483 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);
484
485 if (TREE_CODE (p2) == CONSTRUCTOR)
486 p2_bounds = (*CONSTRUCTOR_ELTS (p2))[1].value;
487 else
488 p2_bounds
489 = build_component_ref (p2, NULL_TREE,
490 DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);
491
492 same_bounds
493 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
494
495 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
496 return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
497 build_binary_op (TRUTH_ORIF_EXPR, result_type,
498 p1_array_is_null, same_bounds));
499 }
500 \f
501 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
502 type TYPE. We know that TYPE is a modular type with a nonbinary
503 modulus. */
504
505 static tree
506 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
507 tree rhs)
508 {
509 tree modulus = TYPE_MODULUS (type);
510 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
511 unsigned int precision;
512 bool unsignedp = true;
513 tree op_type = type;
514 tree result;
515
516 /* If this is an addition of a constant, convert it to a subtraction
517 of a constant since we can do that faster. */
518 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
519 {
520 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
521 op_code = MINUS_EXPR;
522 }
523
 524   /* For the logical operations, we only need NEEDED_PRECISION bits.  For
 525      addition and subtraction, we need one more, and for multiplication we
 526      need twice as many.  But we never want to use a precision smaller than
 527      that of the type.  */
528 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
529 needed_precision += 1;
530 else if (op_code == MULT_EXPR)
531 needed_precision *= 2;
532
533 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
534
535 /* Unsigned will do for everything but subtraction. */
536 if (op_code == MINUS_EXPR)
537 unsignedp = false;
538
539 /* If our type is the wrong signedness or isn't wide enough, make a new
540 type and convert both our operands to it. */
541 if (TYPE_PRECISION (op_type) < precision
542 || TYPE_UNSIGNED (op_type) != unsignedp)
543 {
544 /* Copy the node so we ensure it can be modified to make it modular. */
545 op_type = copy_node (gnat_type_for_size (precision, unsignedp));
546 modulus = convert (op_type, modulus);
547 SET_TYPE_MODULUS (op_type, modulus);
548 TYPE_MODULAR_P (op_type) = 1;
549 lhs = convert (op_type, lhs);
550 rhs = convert (op_type, rhs);
551 }
552
553 /* Do the operation, then we'll fix it up. */
554 result = fold_build2 (op_code, op_type, lhs, rhs);
555
556 /* For multiplication, we have no choice but to do a full modulus
557 operation. However, we want to do this in the narrowest
558 possible size. */
559 if (op_code == MULT_EXPR)
560 {
561 tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
562 modulus = convert (div_type, modulus);
563 SET_TYPE_MODULUS (div_type, modulus);
564 TYPE_MODULAR_P (div_type) = 1;
565 result = convert (op_type,
566 fold_build2 (TRUNC_MOD_EXPR, div_type,
567 convert (div_type, result), modulus));
568 }
569
570 /* For subtraction, add the modulus back if we are negative. */
571 else if (op_code == MINUS_EXPR)
572 {
573 result = gnat_protect_expr (result);
574 result = fold_build3 (COND_EXPR, op_type,
575 fold_build2 (LT_EXPR, boolean_type_node, result,
576 convert (op_type, integer_zero_node)),
577 fold_build2 (PLUS_EXPR, op_type, result, modulus),
578 result);
579 }
580
581 /* For the other operations, subtract the modulus if we are >= it. */
582 else
583 {
584 result = gnat_protect_expr (result);
585 result = fold_build3 (COND_EXPR, op_type,
586 fold_build2 (GE_EXPR, boolean_type_node,
587 result, modulus),
588 fold_build2 (MINUS_EXPR, op_type,
589 result, modulus),
590 result);
591 }
592
593 return convert (type, result);
594 }
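/* As a worked example, consider adding the constant 3 in a modular type
   with modulus 7.  The code above first rewrites X + 3 as X - 4, since
   7 - 3 = 4, and then adds the modulus back if the difference is negative:
   for X = 2, 2 - 4 = -2 and -2 + 7 = 5, which is indeed (2 + 3) mod 7.  */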
595 \f
596 /* This page contains routines that implement the Ada semantics with regard
597 to atomic objects. They are fully piggybacked on the middle-end support
598 for atomic loads and stores.
599
600 *** Memory barriers and volatile objects ***
601
602 We implement the weakened form of the C.6(16) clause that was introduced
603 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
604 implementable without significant performance hits on modern platforms.
605
606 We also take advantage of the requirements imposed on shared variables by
607 9.10 (conditions for sequential actions) to have non-erroneous execution
 608    and consider that C.6(16) and C.6(17) only prescribe a uniform order of
609 volatile updates with regard to sequential actions, i.e. with regard to
610 reads or updates of atomic objects.
611
612 As such, an update of an atomic object by a task requires that all earlier
613 accesses to volatile objects have completed. Similarly, later accesses to
614 volatile objects cannot be reordered before the update of the atomic object.
615 So, memory barriers both before and after the atomic update are needed.
616
617 For a read of an atomic object, to avoid seeing writes of volatile objects
618 by a task earlier than by the other tasks, a memory barrier is needed before
619 the atomic read. Finally, to avoid reordering later reads or updates of
620 volatile objects to before the atomic read, a barrier is needed after the
621 atomic read.
622
623 So, memory barriers are needed before and after atomic reads and updates.
624 And, in order to simplify the implementation, we use full memory barriers
625 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
626
627 /* Return the size of TYPE, which must be a positive power of 2. */
628
629 static unsigned int
630 resolve_atomic_size (tree type)
631 {
632 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
633
634 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
635 return size;
636
637 /* We shouldn't reach here without having already detected that the size
638 isn't compatible with an atomic access. */
639 gcc_assert (Serious_Errors_Detected);
640
641 return 0;
642 }
643
644 /* Build an atomic load for the underlying atomic object in SRC. */
645
646 tree
647 build_atomic_load (tree src)
648 {
649 tree ptr_type
650 = build_pointer_type
651 (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
652 tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
653 tree orig_src = src;
654 tree t, addr, val;
655 unsigned int size;
656 int fncode;
657
658 /* Remove conversions to get the address of the underlying object. */
659 src = remove_conversions (src, false);
660 size = resolve_atomic_size (TREE_TYPE (src));
661 if (size == 0)
662 return orig_src;
663
664 fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
665 t = builtin_decl_implicit ((enum built_in_function) fncode);
666
667 addr = build_unary_op (ADDR_EXPR, ptr_type, src);
668 val = build_call_expr (t, 2, addr, mem_model);
669
670 /* First reinterpret the loaded bits in the original type of the load,
671 then convert to the expected result type. */
672 t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
673 return convert (TREE_TYPE (orig_src), t);
674 }
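/* For instance, for a 4-byte atomic object, exact_log2 (4) + 1 = 3 so the
   function above selects BUILT_IN_ATOMIC_LOAD_4 and generates the
   equivalent of

     val = __atomic_load_4 ((volatile void *) &obj, __ATOMIC_SEQ_CST);

   followed by a VIEW_CONVERT_EXPR of VAL back to the type of the object.  */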
675
676 /* Build an atomic store from SRC to the underlying atomic object in DEST. */
677
678 tree
679 build_atomic_store (tree dest, tree src)
680 {
681 tree ptr_type
682 = build_pointer_type
683 (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
684 tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
685 tree orig_dest = dest;
686 tree t, int_type, addr;
687 unsigned int size;
688 int fncode;
689
690 /* Remove conversions to get the address of the underlying object. */
691 dest = remove_conversions (dest, false);
692 size = resolve_atomic_size (TREE_TYPE (dest));
693 if (size == 0)
694 return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
695
696 fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
697 t = builtin_decl_implicit ((enum built_in_function) fncode);
698 int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
699
700 /* First convert the bits to be stored to the original type of the store,
701 then reinterpret them in the effective type. But if the original type
702 is a padded type with the same size, convert to the inner type instead,
703 as we don't want to artificially introduce a CONSTRUCTOR here. */
704 if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
705 && TYPE_SIZE (TREE_TYPE (dest))
706 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
707 src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
708 else
709 src = convert (TREE_TYPE (dest), src);
710 src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
711 addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
712
713 return build_call_expr (t, 3, addr, src, mem_model);
714 }
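/* Likewise, storing a 4-byte value amounts to the equivalent of

     __atomic_store_4 ((volatile void *) &obj, (unsigned int) src,
                       __ATOMIC_SEQ_CST);

   where the cast stands for the VIEW_CONVERT_EXPR reinterpreting the bits
   of SRC in the integer type returned by gnat_type_for_size.  */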
715 \f
716 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
717 desired for the result. Usually the operation is to be performed
718 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
719 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
720 case the type to be used will be derived from the operands.
721
722 This function is very much unlike the ones for C and C++ since we
723 have already done any type conversion and matching required. All we
724 have to do here is validate the work done by SEM and handle subtypes. */
725
726 tree
727 build_binary_op (enum tree_code op_code, tree result_type,
728 tree left_operand, tree right_operand)
729 {
730 tree left_type = TREE_TYPE (left_operand);
731 tree right_type = TREE_TYPE (right_operand);
732 tree left_base_type = get_base_type (left_type);
733 tree right_base_type = get_base_type (right_type);
734 tree operation_type = result_type;
735 tree best_type = NULL_TREE;
736 tree modulus, result;
737 bool has_side_effects = false;
738
739 if (operation_type
740 && TREE_CODE (operation_type) == RECORD_TYPE
741 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
742 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
743
744 if (operation_type
745 && TREE_CODE (operation_type) == INTEGER_TYPE
746 && TYPE_EXTRA_SUBTYPE_P (operation_type))
747 operation_type = get_base_type (operation_type);
748
749 modulus = (operation_type
750 && TREE_CODE (operation_type) == INTEGER_TYPE
751 && TYPE_MODULAR_P (operation_type)
752 ? TYPE_MODULUS (operation_type) : NULL_TREE);
753
754 switch (op_code)
755 {
756 case INIT_EXPR:
757 case MODIFY_EXPR:
758 #ifdef ENABLE_CHECKING
759 gcc_assert (result_type == NULL_TREE);
760 #endif
761 /* If there were integral or pointer conversions on the LHS, remove
762 them; we'll be putting them back below if needed. Likewise for
763 conversions between array and record types, except for justified
764 modular types. But don't do this if the right operand is not
765 BLKmode (for packed arrays) unless we are not changing the mode. */
766 while ((CONVERT_EXPR_P (left_operand)
767 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
768 && (((INTEGRAL_TYPE_P (left_type)
769 || POINTER_TYPE_P (left_type))
770 && (INTEGRAL_TYPE_P (TREE_TYPE
771 (TREE_OPERAND (left_operand, 0)))
772 || POINTER_TYPE_P (TREE_TYPE
773 (TREE_OPERAND (left_operand, 0)))))
774 || (((TREE_CODE (left_type) == RECORD_TYPE
775 && !TYPE_JUSTIFIED_MODULAR_P (left_type))
776 || TREE_CODE (left_type) == ARRAY_TYPE)
777 && ((TREE_CODE (TREE_TYPE
778 (TREE_OPERAND (left_operand, 0)))
779 == RECORD_TYPE)
780 || (TREE_CODE (TREE_TYPE
781 (TREE_OPERAND (left_operand, 0)))
782 == ARRAY_TYPE))
783 && (TYPE_MODE (right_type) == BLKmode
784 || (TYPE_MODE (left_type)
785 == TYPE_MODE (TREE_TYPE
786 (TREE_OPERAND
787 (left_operand, 0))))))))
788 {
789 left_operand = TREE_OPERAND (left_operand, 0);
790 left_type = TREE_TYPE (left_operand);
791 }
792
793 /* If a class-wide type may be involved, force use of the RHS type. */
794 if ((TREE_CODE (right_type) == RECORD_TYPE
795 || TREE_CODE (right_type) == UNION_TYPE)
796 && TYPE_ALIGN_OK (right_type))
797 operation_type = right_type;
798
799 /* If we are copying between padded objects with compatible types, use
 800          the padded view of the objects; this is very likely more efficient.
801 Likewise for a padded object that is assigned a constructor, if we
802 can convert the constructor to the inner type, to avoid putting a
803 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
804 actually copied anything. */
805 else if (TYPE_IS_PADDING_P (left_type)
806 && TREE_CONSTANT (TYPE_SIZE (left_type))
807 && ((TREE_CODE (right_operand) == COMPONENT_REF
808 && TYPE_MAIN_VARIANT (left_type)
809 == TYPE_MAIN_VARIANT
810 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
811 || (TREE_CODE (right_operand) == CONSTRUCTOR
812 && !CONTAINS_PLACEHOLDER_P
813 (DECL_SIZE (TYPE_FIELDS (left_type)))))
814 && !integer_zerop (TYPE_SIZE (right_type)))
815 {
816 /* We make an exception for a BLKmode type padding a non-BLKmode
817 inner type and do the conversion of the LHS right away, since
818 unchecked_convert wouldn't do it properly. */
819 if (TYPE_MODE (left_type) == BLKmode
820 && TYPE_MODE (right_type) != BLKmode
821 && TREE_CODE (right_operand) != CONSTRUCTOR)
822 {
823 operation_type = right_type;
824 left_operand = convert (operation_type, left_operand);
825 left_type = operation_type;
826 }
827 else
828 operation_type = left_type;
829 }
830
831 /* If we have a call to a function that returns an unconstrained type
832 with default discriminant on the RHS, use the RHS type (which is
833 padded) as we cannot compute the size of the actual assignment. */
834 else if (TREE_CODE (right_operand) == CALL_EXPR
835 && TYPE_IS_PADDING_P (right_type)
836 && CONTAINS_PLACEHOLDER_P
837 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
838 operation_type = right_type;
839
840 /* Find the best type to use for copying between aggregate types. */
841 else if (((TREE_CODE (left_type) == ARRAY_TYPE
842 && TREE_CODE (right_type) == ARRAY_TYPE)
843 || (TREE_CODE (left_type) == RECORD_TYPE
844 && TREE_CODE (right_type) == RECORD_TYPE))
845 && (best_type = find_common_type (left_type, right_type)))
846 operation_type = best_type;
847
848 /* Otherwise use the LHS type. */
849 else
850 operation_type = left_type;
851
852 /* Ensure everything on the LHS is valid. If we have a field reference,
853 strip anything that get_inner_reference can handle. Then remove any
854 conversions between types having the same code and mode. And mark
855 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
856 either an INDIRECT_REF, a NULL_EXPR or a DECL node. */
857 result = left_operand;
858 while (true)
859 {
860 tree restype = TREE_TYPE (result);
861
862 if (TREE_CODE (result) == COMPONENT_REF
863 || TREE_CODE (result) == ARRAY_REF
864 || TREE_CODE (result) == ARRAY_RANGE_REF)
865 while (handled_component_p (result))
866 result = TREE_OPERAND (result, 0);
867 else if (TREE_CODE (result) == REALPART_EXPR
868 || TREE_CODE (result) == IMAGPART_EXPR
869 || (CONVERT_EXPR_P (result)
870 && (((TREE_CODE (restype)
871 == TREE_CODE (TREE_TYPE
872 (TREE_OPERAND (result, 0))))
873 && (TYPE_MODE (TREE_TYPE
874 (TREE_OPERAND (result, 0)))
875 == TYPE_MODE (restype)))
876 || TYPE_ALIGN_OK (restype))))
877 result = TREE_OPERAND (result, 0);
878 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
879 {
880 TREE_ADDRESSABLE (result) = 1;
881 result = TREE_OPERAND (result, 0);
882 }
883 else
884 break;
885 }
886
887 gcc_assert (TREE_CODE (result) == INDIRECT_REF
888 || TREE_CODE (result) == NULL_EXPR
889 || DECL_P (result));
890
891 /* Convert the right operand to the operation type unless it is
892 either already of the correct type or if the type involves a
893 placeholder, since the RHS may not have the same record type. */
894 if (operation_type != right_type
895 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
896 {
897 right_operand = convert (operation_type, right_operand);
898 right_type = operation_type;
899 }
900
901 /* If the left operand is not of the same type as the operation
902 type, wrap it up in a VIEW_CONVERT_EXPR. */
903 if (left_type != operation_type)
904 left_operand = unchecked_convert (operation_type, left_operand, false);
905
906 has_side_effects = true;
907 modulus = NULL_TREE;
908 break;
909
910 case ARRAY_REF:
911 if (!operation_type)
912 operation_type = TREE_TYPE (left_type);
913
914 /* ... fall through ... */
915
916 case ARRAY_RANGE_REF:
 917       /* First look through a conversion between type variants.  Note that
918 this changes neither the operation type nor the type domain. */
919 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
920 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
921 == TYPE_MAIN_VARIANT (left_type))
922 {
923 left_operand = TREE_OPERAND (left_operand, 0);
924 left_type = TREE_TYPE (left_operand);
925 }
926
927 /* For a range, make sure the element type is consistent. */
928 if (op_code == ARRAY_RANGE_REF
929 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
930 operation_type = build_array_type (TREE_TYPE (left_type),
931 TYPE_DOMAIN (operation_type));
932
933 /* Then convert the right operand to its base type. This will prevent
934 unneeded sign conversions when sizetype is wider than integer. */
935 right_operand = convert (right_base_type, right_operand);
936 right_operand = convert_to_index_type (right_operand);
937 modulus = NULL_TREE;
938 break;
939
940 case TRUTH_ANDIF_EXPR:
941 case TRUTH_ORIF_EXPR:
942 case TRUTH_AND_EXPR:
943 case TRUTH_OR_EXPR:
944 case TRUTH_XOR_EXPR:
945 #ifdef ENABLE_CHECKING
946 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
947 #endif
948 operation_type = left_base_type;
949 left_operand = convert (operation_type, left_operand);
950 right_operand = convert (operation_type, right_operand);
951 break;
952
953 case GE_EXPR:
954 case LE_EXPR:
955 case GT_EXPR:
956 case LT_EXPR:
957 case EQ_EXPR:
958 case NE_EXPR:
959 #ifdef ENABLE_CHECKING
960 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
961 #endif
962 /* If either operand is a NULL_EXPR, just return a new one. */
963 if (TREE_CODE (left_operand) == NULL_EXPR)
964 return build2 (op_code, result_type,
965 build1 (NULL_EXPR, integer_type_node,
966 TREE_OPERAND (left_operand, 0)),
967 integer_zero_node);
968
969 else if (TREE_CODE (right_operand) == NULL_EXPR)
970 return build2 (op_code, result_type,
971 build1 (NULL_EXPR, integer_type_node,
972 TREE_OPERAND (right_operand, 0)),
973 integer_zero_node);
974
 975       /* If either object is a justified modular type, get the
976 fields from within. */
977 if (TREE_CODE (left_type) == RECORD_TYPE
978 && TYPE_JUSTIFIED_MODULAR_P (left_type))
979 {
980 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
981 left_operand);
982 left_type = TREE_TYPE (left_operand);
983 left_base_type = get_base_type (left_type);
984 }
985
986 if (TREE_CODE (right_type) == RECORD_TYPE
987 && TYPE_JUSTIFIED_MODULAR_P (right_type))
988 {
989 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
990 right_operand);
991 right_type = TREE_TYPE (right_operand);
992 right_base_type = get_base_type (right_type);
993 }
994
995 /* If both objects are arrays, compare them specially. */
996 if ((TREE_CODE (left_type) == ARRAY_TYPE
997 || (TREE_CODE (left_type) == INTEGER_TYPE
998 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
999 && (TREE_CODE (right_type) == ARRAY_TYPE
1000 || (TREE_CODE (right_type) == INTEGER_TYPE
1001 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1002 {
1003 result = compare_arrays (input_location,
1004 result_type, left_operand, right_operand);
1005 if (op_code == NE_EXPR)
1006 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1007 else
1008 gcc_assert (op_code == EQ_EXPR);
1009
1010 return result;
1011 }
1012
1013 /* Otherwise, the base types must be the same, unless they are both fat
1014 pointer types or record types. In the latter case, use the best type
1015 and convert both operands to that type. */
1016 if (left_base_type != right_base_type)
1017 {
1018 if (TYPE_IS_FAT_POINTER_P (left_base_type)
1019 && TYPE_IS_FAT_POINTER_P (right_base_type))
1020 {
1021 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1022 == TYPE_MAIN_VARIANT (right_base_type));
1023 best_type = left_base_type;
1024 }
1025
1026 else if (TREE_CODE (left_base_type) == RECORD_TYPE
1027 && TREE_CODE (right_base_type) == RECORD_TYPE)
1028 {
1029 /* The only way this is permitted is if both types have the same
1030 name. In that case, one of them must not be self-referential.
1031                  Use it as the best type, preferring one with a fixed size.  */
1032 gcc_assert (TYPE_NAME (left_base_type)
1033 && TYPE_NAME (left_base_type)
1034 == TYPE_NAME (right_base_type));
1035
1036 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1037 best_type = left_base_type;
1038 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1039 best_type = right_base_type;
1040 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1041 best_type = left_base_type;
1042 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1043 best_type = right_base_type;
1044 else
1045 gcc_unreachable ();
1046 }
1047
1048 else
1049 gcc_unreachable ();
1050
1051 left_operand = convert (best_type, left_operand);
1052 right_operand = convert (best_type, right_operand);
1053 }
1054 else
1055 {
1056 left_operand = convert (left_base_type, left_operand);
1057 right_operand = convert (right_base_type, right_operand);
1058 }
1059
1060 /* If both objects are fat pointers, compare them specially. */
1061 if (TYPE_IS_FAT_POINTER_P (left_base_type))
1062 {
1063 result
1064 = compare_fat_pointers (input_location,
1065 result_type, left_operand, right_operand);
1066 if (op_code == NE_EXPR)
1067 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1068 else
1069 gcc_assert (op_code == EQ_EXPR);
1070
1071 return result;
1072 }
1073
1074 modulus = NULL_TREE;
1075 break;
1076
1077 case LSHIFT_EXPR:
1078 case RSHIFT_EXPR:
1079 case LROTATE_EXPR:
1080 case RROTATE_EXPR:
1081 /* The RHS of a shift can be any type. Also, ignore any modulus
1082 (we used to abort, but this is needed for unchecked conversion
1083 to modular types). Otherwise, processing is the same as normal. */
1084 gcc_assert (operation_type == left_base_type);
1085 modulus = NULL_TREE;
1086 left_operand = convert (operation_type, left_operand);
1087 break;
1088
1089 case BIT_AND_EXPR:
1090 case BIT_IOR_EXPR:
1091 case BIT_XOR_EXPR:
1092 /* For binary modulus, if the inputs are in range, so are the
1093 outputs. */
1094 if (modulus && integer_pow2p (modulus))
1095 modulus = NULL_TREE;
1096 goto common;
1097
1098 case COMPLEX_EXPR:
1099 gcc_assert (TREE_TYPE (result_type) == left_base_type
1100 && TREE_TYPE (result_type) == right_base_type);
1101 left_operand = convert (left_base_type, left_operand);
1102 right_operand = convert (right_base_type, right_operand);
1103 break;
1104
1105 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1106 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1107 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1108 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1109 /* These always produce results lower than either operand. */
1110 modulus = NULL_TREE;
1111 goto common;
1112
1113 case POINTER_PLUS_EXPR:
1114 gcc_assert (operation_type == left_base_type
1115 && sizetype == right_base_type);
1116 left_operand = convert (operation_type, left_operand);
1117 right_operand = convert (sizetype, right_operand);
1118 break;
1119
1120 case PLUS_NOMOD_EXPR:
1121 case MINUS_NOMOD_EXPR:
1122 if (op_code == PLUS_NOMOD_EXPR)
1123 op_code = PLUS_EXPR;
1124 else
1125 op_code = MINUS_EXPR;
1126 modulus = NULL_TREE;
1127
1128 /* ... fall through ... */
1129
1130 case PLUS_EXPR:
1131 case MINUS_EXPR:
1132       /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1133          other compilers.  Contrary to C, Ada doesn't allow arithmetic in
1134 these types but can generate addition/subtraction for Succ/Pred. */
1135 if (operation_type
1136 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1137 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1138 operation_type = left_base_type = right_base_type
1139 = gnat_type_for_mode (TYPE_MODE (operation_type),
1140 TYPE_UNSIGNED (operation_type));
1141
1142 /* ... fall through ... */
1143
1144 default:
1145 common:
1146       /* The result type should be the same as the base types of
1147          both operands (and they should be the same).  Convert
1148 everything to the result type. */
1149
1150 gcc_assert (operation_type == left_base_type
1151 && left_base_type == right_base_type);
1152 left_operand = convert (operation_type, left_operand);
1153 right_operand = convert (operation_type, right_operand);
1154 }
1155
1156 if (modulus && !integer_pow2p (modulus))
1157 {
1158 result = nonbinary_modular_operation (op_code, operation_type,
1159 left_operand, right_operand);
1160 modulus = NULL_TREE;
1161 }
1162 /* If either operand is a NULL_EXPR, just return a new one. */
1163 else if (TREE_CODE (left_operand) == NULL_EXPR)
1164 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1165 else if (TREE_CODE (right_operand) == NULL_EXPR)
1166 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1167 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1168 result = fold (build4 (op_code, operation_type, left_operand,
1169 right_operand, NULL_TREE, NULL_TREE));
1170 else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1171 result = build2 (op_code, void_type_node, left_operand, right_operand);
1172 else
1173 result
1174 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1175
1176 if (TREE_CONSTANT (result))
1177 ;
1178 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1179 {
1180 if (TYPE_VOLATILE (operation_type))
1181 TREE_THIS_VOLATILE (result) = 1;
1182 }
1183 else
1184 TREE_CONSTANT (result)
1185 |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1186
1187 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1188
1189 /* If we are working with modular types, perform the MOD operation
1190 if something above hasn't eliminated the need for it. */
1191 if (modulus)
1192 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1193 convert (operation_type, modulus));
1194
1195 if (result_type && result_type != operation_type)
1196 result = convert (result_type, result);
1197
1198 return result;
1199 }
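/* As a usage sketch, gigi typically builds the sum of two integer operands
   already converted by the front-end with something like

     gnu_result = build_binary_op (PLUS_EXPR, gnu_base_type,
                                   gnu_lhs, gnu_rhs);

   where gnu_base_type is the base type in which the operation is to be
   performed; if that type is modular, the reduction modulo its modulus is
   applied as described above.  */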
1200 \f
1201 /* Similar, but for unary operations. */
1202
1203 tree
1204 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1205 {
1206 tree type = TREE_TYPE (operand);
1207 tree base_type = get_base_type (type);
1208 tree operation_type = result_type;
1209 tree result;
1210
1211 if (operation_type
1212 && TREE_CODE (operation_type) == RECORD_TYPE
1213 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1214 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1215
1216 if (operation_type
1217 && TREE_CODE (operation_type) == INTEGER_TYPE
1218 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1219 operation_type = get_base_type (operation_type);
1220
1221 switch (op_code)
1222 {
1223 case REALPART_EXPR:
1224 case IMAGPART_EXPR:
1225 if (!operation_type)
1226 result_type = operation_type = TREE_TYPE (type);
1227 else
1228 gcc_assert (result_type == TREE_TYPE (type));
1229
1230 result = fold_build1 (op_code, operation_type, operand);
1231 break;
1232
1233 case TRUTH_NOT_EXPR:
1234 #ifdef ENABLE_CHECKING
1235 gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1236 #endif
1237 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1238 /* When not optimizing, fold the result as invert_truthvalue_loc
1239 doesn't fold the result of comparisons. This is intended to undo
1240 the trick used for boolean rvalues in gnat_to_gnu. */
1241 if (!optimize)
1242 result = fold (result);
1243 break;
1244
1245 case ATTR_ADDR_EXPR:
1246 case ADDR_EXPR:
1247 switch (TREE_CODE (operand))
1248 {
1249 case INDIRECT_REF:
1250 case UNCONSTRAINED_ARRAY_REF:
1251 result = TREE_OPERAND (operand, 0);
1252
1253 /* Make sure the type here is a pointer, not a reference.
1254 GCC wants pointer types for function addresses. */
1255 if (!result_type)
1256 result_type = build_pointer_type (type);
1257
1258 /* If the underlying object can alias everything, propagate the
1259 property since we are effectively retrieving the object. */
1260 if (POINTER_TYPE_P (TREE_TYPE (result))
1261 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1262 {
1263 if (TREE_CODE (result_type) == POINTER_TYPE
1264 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1265 result_type
1266 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1267 TYPE_MODE (result_type),
1268 true);
1269 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1270 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1271 result_type
1272 = build_reference_type_for_mode (TREE_TYPE (result_type),
1273 TYPE_MODE (result_type),
1274 true);
1275 }
1276 break;
1277
1278 case NULL_EXPR:
1279 result = operand;
1280 TREE_TYPE (result) = type = build_pointer_type (type);
1281 break;
1282
1283 case COMPOUND_EXPR:
1284 /* Fold a compound expression if it has unconstrained array type
1285          since the middle-end cannot handle it.  But we don't do it in the
1286 general case because it may introduce aliasing issues if the
1287 first operand is an indirect assignment and the second operand
1288 the corresponding address, e.g. for an allocator. */
1289 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
1290 {
1291 result = build_unary_op (ADDR_EXPR, result_type,
1292 TREE_OPERAND (operand, 1));
1293 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1294 TREE_OPERAND (operand, 0), result);
1295 break;
1296 }
1297 goto common;
1298
1299 case ARRAY_REF:
1300 case ARRAY_RANGE_REF:
1301 case COMPONENT_REF:
1302 case BIT_FIELD_REF:
1303 /* If this is for 'Address, find the address of the prefix and add
1304 the offset to the field. Otherwise, do this the normal way. */
1305 if (op_code == ATTR_ADDR_EXPR)
1306 {
1307 HOST_WIDE_INT bitsize;
1308 HOST_WIDE_INT bitpos;
1309 tree offset, inner;
1310 enum machine_mode mode;
1311 int unsignedp, volatilep;
1312
1313 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1314 &mode, &unsignedp, &volatilep,
1315 false);
1316
1317 /* If INNER is a padding type whose field has a self-referential
1318 size, convert to that inner type. We know the offset is zero
1319 and we need to have that type visible. */
1320 if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
1321 && CONTAINS_PLACEHOLDER_P
1322 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1323 (TREE_TYPE (inner))))))
1324 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1325 inner);
1326
1327 /* Compute the offset as a byte offset from INNER. */
1328 if (!offset)
1329 offset = size_zero_node;
1330
1331 offset = size_binop (PLUS_EXPR, offset,
1332 size_int (bitpos / BITS_PER_UNIT));
1333
1334 /* Take the address of INNER, convert the offset to void *, and
1335             add them.  It will later be converted to the desired result
1336 type, if any. */
1337 inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1338 inner = convert (ptr_void_type_node, inner);
1339 result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1340 inner, offset);
1341 result = convert (build_pointer_type (TREE_TYPE (operand)),
1342 result);
1343 break;
1344 }
1345 goto common;
1346
1347 case CONSTRUCTOR:
1348 /* If this is just a constructor for a padded record, we can
1349 just take the address of the single field and convert it to
1350 a pointer to our type. */
1351 if (TYPE_IS_PADDING_P (type))
1352 {
1353 result = (*CONSTRUCTOR_ELTS (operand))[0].value;
1354 result = convert (build_pointer_type (TREE_TYPE (operand)),
1355 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1356 break;
1357 }
1358
1359 goto common;
1360
1361 case NOP_EXPR:
1362 if (AGGREGATE_TYPE_P (type)
1363 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1364 return build_unary_op (ADDR_EXPR, result_type,
1365 TREE_OPERAND (operand, 0));
1366
1367 /* ... fallthru ... */
1368
1369 case VIEW_CONVERT_EXPR:
1370       /* If this is just a variant conversion or if the conversion doesn't
1371 change the mode, get the result type from this type and go down.
1372 This is needed for conversions of CONST_DECLs, to eventually get
1373 to the address of their CORRESPONDING_VARs. */
1374 if ((TYPE_MAIN_VARIANT (type)
1375 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1376 || (TYPE_MODE (type) != BLKmode
1377 && (TYPE_MODE (type)
1378 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1379 return build_unary_op (ADDR_EXPR,
1380 (result_type ? result_type
1381 : build_pointer_type (type)),
1382 TREE_OPERAND (operand, 0));
1383 goto common;
1384
1385 case CONST_DECL:
1386 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1387
1388 /* ... fall through ... */
1389
1390 default:
1391 common:
1392
1393 /* If we are taking the address of a padded record whose field
1394 contains a template, take the address of the field. */
1395 if (TYPE_IS_PADDING_P (type)
1396 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1397 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1398 {
1399 type = TREE_TYPE (TYPE_FIELDS (type));
1400 operand = convert (type, operand);
1401 }
1402
1403 gnat_mark_addressable (operand);
1404 result = build_fold_addr_expr (operand);
1405 }
1406
1407 TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1408 break;
1409
1410 case INDIRECT_REF:
1411 {
1412 tree t = remove_conversions (operand, false);
1413 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1414
1415 /* If TYPE is a thin pointer, either first retrieve the base if this
1416 is an expression with an offset built for the initialization of an
1417 object with an unconstrained nominal subtype, or else convert to
1418 the fat pointer. */
1419 if (TYPE_IS_THIN_POINTER_P (type))
1420 {
1421 tree rec_type = TREE_TYPE (type);
1422
1423 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1424 && TREE_OPERAND (operand, 1)
1425 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1426 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1427 {
1428 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1429 type = TREE_TYPE (operand);
1430 }
1431 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1432 {
1433 operand
1434 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1435 operand);
1436 type = TREE_TYPE (operand);
1437 }
1438 }
1439
1440 /* If we want to refer to an unconstrained array, use the appropriate
1441 expression. But this will never survive down to the back-end. */
1442 if (TYPE_IS_FAT_POINTER_P (type))
1443 {
1444 result = build1 (UNCONSTRAINED_ARRAY_REF,
1445 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1446 TREE_READONLY (result)
1447 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1448 }
1449
1450 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1451 else if (TREE_CODE (operand) == ADDR_EXPR)
1452 result = TREE_OPERAND (operand, 0);
1453
1454 /* Otherwise, build and fold the indirect reference. */
1455 else
1456 {
1457 result = build_fold_indirect_ref (operand);
1458 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1459 }
1460
1461 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1462 {
1463 TREE_SIDE_EFFECTS (result) = 1;
1464 if (TREE_CODE (result) == INDIRECT_REF)
1465 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1466 }
1467
1468 if ((TREE_CODE (result) == INDIRECT_REF
1469 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1470 && can_never_be_null)
1471 TREE_THIS_NOTRAP (result) = 1;
1472
1473 break;
1474 }
1475
1476 case NEGATE_EXPR:
1477 case BIT_NOT_EXPR:
1478 {
1479 tree modulus = ((operation_type
1480 && TREE_CODE (operation_type) == INTEGER_TYPE
1481 && TYPE_MODULAR_P (operation_type))
1482 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1483 int mod_pow2 = modulus && integer_pow2p (modulus);
1484
1485 /* If this is a modular type, there are various possibilities
1486 depending on the operation and whether the modulus is a
1487 power of two or not. */
1488
1489 if (modulus)
1490 {
1491 gcc_assert (operation_type == base_type);
1492 operand = convert (operation_type, operand);
1493
1494 /* The fastest in the negate case for binary modulus is
1495 the straightforward code; the TRUNC_MOD_EXPR below
1496 is an AND operation. */
1497 if (op_code == NEGATE_EXPR && mod_pow2)
1498 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1499 fold_build1 (NEGATE_EXPR, operation_type,
1500 operand),
1501 modulus);
1502
1503         /* For the nonbinary negate case, return zero for a zero operand,
1504 else return the modulus minus the operand. If the modulus
1505 is a power of two minus one, we can do the subtraction
1506 as an XOR since it is equivalent and faster on most machines. */
1507 else if (op_code == NEGATE_EXPR && !mod_pow2)
1508 {
1509 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1510 modulus,
1511 convert (operation_type,
1512 integer_one_node))))
1513 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1514 operand, modulus);
1515 else
1516 result = fold_build2 (MINUS_EXPR, operation_type,
1517 modulus, operand);
1518
1519 result = fold_build3 (COND_EXPR, operation_type,
1520 fold_build2 (NE_EXPR,
1521 boolean_type_node,
1522 operand,
1523 convert
1524 (operation_type,
1525 integer_zero_node)),
1526 result, operand);
1527 }
1528 else
1529 {
1530             /* For the NOT cases, we need a constant equal to
1531                the modulus minus one.  For a binary modulus, we
1532                XOR the operand against that constant; for a nonbinary
1533                modulus, we subtract the operand from it.  */
1534
1535 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1536 convert (operation_type,
1537 integer_one_node));
1538
1539 if (mod_pow2)
1540 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1541 operand, cnst);
1542 else
1543 result = fold_build2 (MINUS_EXPR, operation_type,
1544 cnst, operand);
1545 }
1546
1547 break;
1548 }
1549 }
1550
1551 /* ... fall through ... */
1552
1553 default:
1554 gcc_assert (operation_type == base_type);
1555 result = fold_build1 (op_code, operation_type,
1556 convert (operation_type, operand));
1557 }
1558
1559 if (result_type && TREE_TYPE (result) != result_type)
1560 result = convert (result_type, result);
1561
1562 return result;
1563 }
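/* For example, negating in a modular type whose modulus is 7 = 2**3 - 1
   uses the XOR shortcut above: for a nonzero operand X, 7 - X and 7 ^ X
   coincide (e.g. 7 - 3 = 4 = 7 ^ 3), and the COND_EXPR keeps a zero
   operand mapped to zero.  */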
1564 \f
1565 /* Similar, but for COND_EXPR. */
1566
1567 tree
1568 build_cond_expr (tree result_type, tree condition_operand,
1569 tree true_operand, tree false_operand)
1570 {
1571 bool addr_p = false;
1572 tree result;
1573
1574 /* The front-end verified that result, true and false operands have
1575      the same base type.  Convert everything to the result type.  */
1576 true_operand = convert (result_type, true_operand);
1577 false_operand = convert (result_type, false_operand);
1578
1579 /* If the result type is unconstrained, take the address of the operands and
1580 then dereference the result. Likewise if the result type is passed by
1581 reference, because creating a temporary of this type is not allowed. */
1582 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1583 || TYPE_IS_BY_REFERENCE_P (result_type)
1584 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1585 {
1586 result_type = build_pointer_type (result_type);
1587 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1588 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1589 addr_p = true;
1590 }
1591
1592 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1593 true_operand, false_operand);
1594
1595 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1596 in both arms, make sure it gets evaluated by moving it ahead of the
1597 conditional expression. This is necessary because it is evaluated
1598 in only one place at run time and would otherwise be uninitialized
1599 in one of the arms. */
1600 true_operand = skip_simple_arithmetic (true_operand);
1601 false_operand = skip_simple_arithmetic (false_operand);
1602
1603 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1604 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1605
1606 if (addr_p)
1607 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1608
1609 return result;
1610 }
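/* A standalone sketch (not part of this file's build) of the trick used
   above, in plain C: when the selected object must not be copied into a
   temporary, select between the operands' addresses and dereference the
   chosen pointer instead of selecting between the values.  */
#if 0
#include <stdio.h>
#include <string.h>

struct big { char data[1024]; };

int
main (void)
{
  struct big a, b;
  int cond = 1;

  strcpy (a.data, "first");
  strcpy (b.data, "second");

  /* No temporary struct big is created: the conditional yields an
     address and the dereference designates the chosen object itself.  */
  struct big *p = cond ? &a : &b;
  printf ("%s\n", p->data);
  return 0;
}
#endif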
1611
1612 /* Similar, but for COMPOUND_EXPR. */
1613
1614 tree
1615 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1616 {
1617 bool addr_p = false;
1618 tree result;
1619
1620 /* If the result type is unconstrained, take the address of the operand and
1621 then dereference the result. Likewise if the result type is passed by
1622 reference, but this is natively handled in the gimplifier. */
1623 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1624 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1625 {
1626 result_type = build_pointer_type (result_type);
1627 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1628 addr_p = true;
1629 }
1630
1631 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1632 expr_operand);
1633
1634 if (addr_p)
1635 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1636
1637 return result;
1638 }
1639 \f
1640 /* Conveniently construct a function call expression. FNDECL names the
1641 function to be called, N is the number of arguments, and the "..."
1642 parameters are the argument expressions. Unlike build_call_expr,
1643 this doesn't fold the call, so it always returns a CALL_EXPR. */
1644
1645 tree
1646 build_call_n_expr (tree fndecl, int n, ...)
1647 {
1648 va_list ap;
1649 tree fntype = TREE_TYPE (fndecl);
1650 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1651
1652 va_start (ap, n);
1653 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1654 va_end (ap);
1655 return fn;
1656 }
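/* A standalone sketch (not part of this file's build) of the same
   varargs-forwarding pattern in plain C: a "..." convenience wrapper
   captures its arguments with va_start and hands the va_list to a
   worker, as build_call_n_expr hands its list to build_call_valist.  */
#if 0
#include <stdarg.h>
#include <stdio.h>

static int
sum_valist (int n, va_list ap)
{
  int i, total = 0;
  for (i = 0; i < n; i++)
    total += va_arg (ap, int);
  return total;
}

static int
sum_n (int n, ...)
{
  va_list ap;
  int total;

  va_start (ap, n);
  total = sum_valist (n, ap);
  va_end (ap);
  return total;
}

int
main (void)
{
  printf ("%d\n", sum_n (3, 1, 2, 3));	/* prints 6 */
  return 0;
}
#endif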
1657 \f
1658 /* Call a function that raises an exception and pass the line number and file
1659 name, if requested. MSG says which exception function to call.
1660
1661 GNAT_NODE is the gnat node conveying the source location for which the
1662 error should be signaled, or Empty, in which case the error is signaled on
1663 the current ref_file_name/input_line.
1664
1665 KIND says which kind of exception this is for
1666 (N_Raise_{Constraint,Storage,Program}_Error). */
1667
1668 tree
1669 build_call_raise (int msg, Node_Id gnat_node, char kind)
1670 {
1671 tree fndecl = gnat_raise_decls[msg];
1672 tree label = get_exception_label (kind);
1673 tree filename;
1674 int line_number;
1675 const char *str;
1676 int len;
1677
1678 /* If this is to be done as a goto, handle that case. */
1679 if (label)
1680 {
1681 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1682 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1683
1684 /* If Local_Raise is present, generate
1685 Local_Raise (exception'Identity); */
1686 if (Present (local_raise))
1687 {
1688 tree gnu_local_raise
1689 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1690 tree gnu_exception_entity
1691 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1692 tree gnu_call
1693 = build_call_n_expr (gnu_local_raise, 1,
1694 build_unary_op (ADDR_EXPR, NULL_TREE,
1695 gnu_exception_entity));
1696
1697 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1698 gnu_call, gnu_result);
1699 }
1700 return gnu_result;
1701 }
1702
1703 str
1704 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1705 ? ""
1706 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1707 ? IDENTIFIER_POINTER
1708 (get_identifier (Get_Name_String
1709 (Debug_Source_Name
1710 (Get_Source_File_Index (Sloc (gnat_node))))))
1711 : ref_filename;
1712
1713 len = strlen (str);
1714 filename = build_string (len, str);
1715 line_number
1716 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1717 ? Get_Logical_Line_Number (Sloc (gnat_node))
1718 : LOCATION_LINE (input_location);
1719
1720 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1721 build_index_type (size_int (len)));
1722
1723 return
1724 build_call_n_expr (fndecl, 2,
1725 build1 (ADDR_EXPR,
1726 build_pointer_type (unsigned_char_type_node),
1727 filename),
1728 build_int_cst (NULL_TREE, line_number));
1729 }
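/* A standalone sketch (hypothetical routine, not part of this file's
   build) of the call shape built above: the raise function receives the
   address of the file name and the line number, mirroring the
   two-argument call to FNDECL constructed here.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

static void
raise_error (const unsigned char *file, int line)
{
  fprintf (stderr, "exception raised at %s:%d\n", file, line);
  exit (1);
}

int
main (void)
{
  raise_error ((const unsigned char *) __FILE__, __LINE__);
  return 0;
}
#endif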
1730
1731 /* Similar to build_call_raise, for an index or range check exception as
1732 determined by MSG, with extra information of the form
1733 "INDEX out of range FIRST..LAST". */
1734
1735 tree
1736 build_call_raise_range (int msg, Node_Id gnat_node,
1737 tree index, tree first, tree last)
1738 {
1739 tree fndecl = gnat_raise_decls_ext[msg];
1740 tree filename;
1741 int line_number, column_number;
1742 const char *str;
1743 int len;
1744
1745 str
1746 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1747 ? ""
1748 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1749 ? IDENTIFIER_POINTER
1750 (get_identifier (Get_Name_String
1751 (Debug_Source_Name
1752 (Get_Source_File_Index (Sloc (gnat_node))))))
1753 : ref_filename;
1754
1755 len = strlen (str);
1756 filename = build_string (len, str);
1757 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1758 {
1759 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1760 column_number = Get_Column_Number (Sloc (gnat_node));
1761 }
1762 else
1763 {
1764 line_number = LOCATION_LINE (input_location);
1765 column_number = 0;
1766 }
1767
1768 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1769 build_index_type (size_int (len)));
1770
1771 return
1772 build_call_n_expr (fndecl, 6,
1773 build1 (ADDR_EXPR,
1774 build_pointer_type (unsigned_char_type_node),
1775 filename),
1776 build_int_cst (NULL_TREE, line_number),
1777 build_int_cst (NULL_TREE, column_number),
1778 convert (integer_type_node, index),
1779 convert (integer_type_node, first),
1780 convert (integer_type_node, last));
1781 }
1782
1783 /* Similar to build_call_raise, with extra information about the column
1784 where the check failed. */
1785
1786 tree
1787 build_call_raise_column (int msg, Node_Id gnat_node)
1788 {
1789 tree fndecl = gnat_raise_decls_ext[msg];
1790 tree filename;
1791 int line_number, column_number;
1792 const char *str;
1793 int len;
1794
1795 str
1796 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1797 ? ""
1798 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1799 ? IDENTIFIER_POINTER
1800 (get_identifier (Get_Name_String
1801 (Debug_Source_Name
1802 (Get_Source_File_Index (Sloc (gnat_node))))))
1803 : ref_filename;
1804
1805 len = strlen (str);
1806 filename = build_string (len, str);
1807 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1808 {
1809 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1810 column_number = Get_Column_Number (Sloc (gnat_node));
1811 }
1812 else
1813 {
1814 line_number = LOCATION_LINE (input_location);
1815 column_number = 0;
1816 }
1817
1818 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1819 build_index_type (size_int (len)));
1820
1821 return
1822 build_call_n_expr (fndecl, 3,
1823 build1 (ADDR_EXPR,
1824 build_pointer_type (unsigned_char_type_node),
1825 filename),
1826 build_int_cst (NULL_TREE, line_number),
1827 build_int_cst (NULL_TREE, column_number));
1828 }
1829 \f
1830 /* qsort comparator for the bit positions of two constructor elements
1831 for record components. */
1832
1833 static int
1834 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1835 {
1836 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1837 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1838 const_tree const field1 = elmt1->index;
1839 const_tree const field2 = elmt2->index;
1840 const int ret
1841 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1842
1843 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1844 }
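/* A standalone sketch (not part of this file's build) of the same
   two-key comparison scheme with plain qsort: a primary key orders the
   elements and a unique secondary key breaks ties, making the sort
   order deterministic just as DECL_UID does above.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct elmt { int pos; int uid; };

static int
compare_elmt (const void *p1, const void *p2)
{
  const struct elmt *e1 = (const struct elmt *) p1;
  const struct elmt *e2 = (const struct elmt *) p2;

  if (e1->pos != e2->pos)
    return e1->pos < e2->pos ? -1 : 1;
  /* Tie-break on the unique id for a deterministic order.  */
  return e1->uid - e2->uid;
}

int
main (void)
{
  struct elmt v[] = { { 8, 2 }, { 0, 1 }, { 8, 0 } };
  int i;

  qsort (v, 3, sizeof v[0], compare_elmt);
  for (i = 0; i < 3; i++)
    printf ("(%d, %d)\n", v[i].pos, v[i].uid);
  return 0;
}
#endif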
1845
1846 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1847
1848 tree
1849 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1850 {
1851 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1852 bool read_only = true;
1853 bool side_effects = false;
1854 tree result, obj, val;
1855 unsigned int n_elmts;
1856
1857 /* Scan the elements to see if they are all constant or if any has side
1858 effects, to let us set global flags on the resulting constructor. Count
1859 the elements along the way for possible sorting purposes below. */
1860 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1861 {
1862 /* The predicate must be in keeping with output_constructor. */
1863 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1864 || (TREE_CODE (type) == RECORD_TYPE
1865 && CONSTRUCTOR_BITFIELD_P (obj)
1866 && !initializer_constant_valid_for_bitfield_p (val))
1867 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1868 allconstant = false;
1869
1870 if (!TREE_READONLY (val))
1871 read_only = false;
1872
1873 if (TREE_SIDE_EFFECTS (val))
1874 side_effects = true;
1875 }
1876
1877 /* For record types with constant components only, sort the field
1878 list by increasing bit position. This is necessary to ensure the
1879 constructor can be output as static data. */
1880 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1881 v->qsort (compare_elmt_bitpos);
1882
1883 result = build_constructor (type, v);
1884 CONSTRUCTOR_NO_CLEARING (result) = 1;
1885 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1886 TREE_SIDE_EFFECTS (result) = side_effects;
1887 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1888 return result;
1889 }
1890 \f
1891 /* Return a COMPONENT_REF to access the field given either by COMPONENT,
1892 an IDENTIFIER_NODE naming the field, or by FIELD, a FIELD_DECL for it.
1893 Don't fold the result if NO_FOLD_P is true.
1894
1895 We also handle the fact that we might have been passed a pointer to the
1896 actual record and know how to look for fields in variant parts. */
1897
1898 static tree
1899 build_simple_component_ref (tree record_variable, tree component, tree field,
1900 bool no_fold_p)
1901 {
1902 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1903 tree base, ref;
1904
1905 gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
1906 && COMPLETE_TYPE_P (record_type)
1907 && (component == NULL_TREE) != (field == NULL_TREE));
1908
1909 /* If no field was specified, look for a field with the specified name in
1910 the current record only. */
1911 if (!field)
1912 for (field = TYPE_FIELDS (record_type);
1913 field;
1914 field = DECL_CHAIN (field))
1915 if (DECL_NAME (field) == component)
1916 break;
1917
1918 if (!field)
1919 return NULL_TREE;
1920
1921 /* If this field is not in the specified record, see if we can find a field
1922 in the specified record whose original field is the same as this one. */
1923 if (DECL_CONTEXT (field) != record_type)
1924 {
1925 tree new_field;
1926
1927 /* First loop through normal components. */
1928 for (new_field = TYPE_FIELDS (record_type);
1929 new_field;
1930 new_field = DECL_CHAIN (new_field))
1931 if (SAME_FIELD_P (field, new_field))
1932 break;
1933
1934 /* Next, see if we're looking for an inherited component in an extension.
1935 If so, look through the extension directly, unless the type contains
1936 a placeholder, as it might be needed for a later substitution. */
1937 if (!new_field
1938 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1939 && TYPE_ALIGN_OK (record_type)
1940 && !type_contains_placeholder_p (record_type)
1941 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1942 == RECORD_TYPE
1943 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1944 {
1945 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1946 NULL_TREE, field, no_fold_p);
1947 if (ref)
1948 return ref;
1949 }
1950
1951 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
1952 component in the first search. Doing this search in two steps is
1953 required to avoid hidden homonymous fields in the _Parent field. */
1954 if (!new_field)
1955 for (new_field = TYPE_FIELDS (record_type);
1956 new_field;
1957 new_field = DECL_CHAIN (new_field))
1958 if (DECL_INTERNAL_P (new_field))
1959 {
1960 tree field_ref
1961 = build_simple_component_ref (record_variable,
1962 NULL_TREE, new_field, no_fold_p);
1963 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1964 no_fold_p);
1965 if (ref)
1966 return ref;
1967 }
1968
1969 field = new_field;
1970 }
1971
1972 if (!field)
1973 return NULL_TREE;
1974
1975 /* If the field's offset has overflowed, do not try to access it, as doing
1976 so may trigger sanity checks deeper in the back-end. Note that we don't
1977 need to warn since this will be done on trying to declare the object. */
1978 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1979 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1980 return NULL_TREE;
1981
1982 /* We have found a suitable field. Before building the COMPONENT_REF, get
1983 the base object of the record variable if possible. */
1984 base = record_variable;
1985
1986 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR)
1987 {
1988 tree inner_variable = TREE_OPERAND (record_variable, 0);
1989 tree inner_type = TYPE_MAIN_VARIANT (TREE_TYPE (inner_variable));
1990
1991 /* Look through a conversion between type variants. This is transparent
1992 as far as the field is concerned. */
1993 if (inner_type == record_type)
1994 base = inner_variable;
1995
1996 /* Look through a conversion between original and packable version, but
1997 the field needs to be adjusted in this case. */
1998 else if (TYPE_NAME (inner_type) == TYPE_NAME (record_type))
1999 {
2000 tree new_field;
2001
2002 for (new_field = TYPE_FIELDS (inner_type);
2003 new_field;
2004 new_field = DECL_CHAIN (new_field))
2005 if (SAME_FIELD_P (field, new_field))
2006 break;
2007 if (new_field)
2008 {
2009 field = new_field;
2010 base = inner_variable;
2011 }
2012 }
2013 }
2014
2015 ref = build3 (COMPONENT_REF, TREE_TYPE (field), base, field, NULL_TREE);
2016
2017 if (TREE_READONLY (record_variable)
2018 || TREE_READONLY (field)
2019 || TYPE_READONLY (record_type))
2020 TREE_READONLY (ref) = 1;
2021
2022 if (TREE_THIS_VOLATILE (record_variable)
2023 || TREE_THIS_VOLATILE (field)
2024 || TYPE_VOLATILE (record_type))
2025 TREE_THIS_VOLATILE (ref) = 1;
2026
2027 if (no_fold_p)
2028 return ref;
2029
2030 /* The generic folder may punt in this case because the inner array type
2031 can be self-referential, but folding is in fact not problematic. */
2032 if (TREE_CODE (base) == CONSTRUCTOR
2033 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (base)))
2034 {
2035 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (base);
2036 unsigned HOST_WIDE_INT idx;
2037 tree index, value;
2038 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2039 if (index == field)
2040 return value;
2041 return ref;
2042 }
2043
2044 return fold (ref);
2045 }
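/* A standalone sketch (hypothetical types, not part of this file's
   build) of the name lookup performed above: walking a chained list of
   fields until one matches, as the TYPE_FIELDS/DECL_CHAIN loops do.  */
#if 0
#include <stdio.h>
#include <string.h>

struct field { const char *name; struct field *chain; };

static struct field *
find_field (struct field *fields, const char *name)
{
  struct field *f;

  for (f = fields; f; f = f->chain)
    if (strcmp (f->name, name) == 0)
      break;
  return f;	/* NULL when no field matches.  */
}

int
main (void)
{
  struct field c = { "c", NULL }, b = { "b", &c }, a = { "a", &b };
  struct field *f = find_field (&a, "b");

  printf ("%s\n", f ? f->name : "(none)");
  return 0;
}
#endif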
2046 \f
2047 /* Likewise, but generate a Constraint_Error if the reference could not be
2048 found. */
2049
2050 tree
2051 build_component_ref (tree record_variable, tree component, tree field,
2052 bool no_fold_p)
2053 {
2054 tree ref = build_simple_component_ref (record_variable, component, field,
2055 no_fold_p);
2056 if (ref)
2057 return ref;
2058
2059 /* If FIELD was specified, assume this is an invalid user field, so
2060 raise Constraint_Error. Otherwise, we have no type to return, so abort. */
2061 gcc_assert (field);
2062 return build1 (NULL_EXPR, TREE_TYPE (field),
2063 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2064 N_Raise_Constraint_Error));
2065 }
2066 \f
2067 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2068 identically. Process the case where a GNAT_PROC to call is provided. */
2069
2070 static inline tree
2071 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2072 Entity_Id gnat_proc, Entity_Id gnat_pool)
2073 {
2074 tree gnu_proc = gnat_to_gnu (gnat_proc);
2075 tree gnu_call;
2076
2077 /* A storage pool's underlying type is a record type (for both predefined
2078 storage pools and GNAT simple storage pools). The secondary stack uses
2079 the same mechanism, but its pool object (SS_Pool) is an integer. */
2080 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2081 {
2082 /* The size is the third parameter; the alignment is the
2083 fourth one and has the same type. */
2084 Entity_Id gnat_size_type
2085 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2086 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2087
2088 tree gnu_pool = gnat_to_gnu (gnat_pool);
2089 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2090 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2091
2092 gnu_size = convert (gnu_size_type, gnu_size);
2093 gnu_align = convert (gnu_size_type, gnu_align);
2094
2095 /* The first argument is always the address of the storage pool;
2096 next comes the address of the object for a deallocation, then
2097 the size and alignment. */
2098 if (gnu_obj)
2099 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2100 gnu_size, gnu_align);
2101 else
2102 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2103 gnu_size, gnu_align);
2104 }
2105
2106 /* Secondary stack case. */
2107 else
2108 {
2109 /* The size is the second parameter. */
2110 Entity_Id gnat_size_type
2111 = Etype (Next_Formal (First_Formal (gnat_proc)));
2112 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2113
2114 gnu_size = convert (gnu_size_type, gnu_size);
2115
2116 /* The first argument is the address of the object for a
2117 deallocation, then comes the size. */
2118 if (gnu_obj)
2119 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2120 else
2121 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2122 }
2123
2124 return gnu_call;
2125 }
2126
2127 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2128 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2129 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2130 latter offers. */
2131
2132 static inline tree
2133 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2134 {
2135 /* When the DATA_TYPE alignment is stricter than what malloc offers
2136 (super-aligned case), we allocate an "aligning" wrapper type and return
2137 the address of its single data field with the malloc's return value
2138 stored just in front. */
2139
2140 unsigned int data_align = TYPE_ALIGN (data_type);
2141 unsigned int system_allocator_alignment
2142 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2143
2144 tree aligning_type
2145 = ((data_align > system_allocator_alignment)
2146 ? make_aligning_type (data_type, data_align, data_size,
2147 system_allocator_alignment,
2148 POINTER_SIZE / BITS_PER_UNIT,
2149 gnat_node)
2150 : NULL_TREE);
2151
2152 tree size_to_malloc
2153 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2154
2155 tree malloc_ptr;
2156
2157 /* On VMS, if pointers are 64-bit and the allocated type's size is
2158 32-bit or its convention is C, allocate 32-bit memory. */
2159 if (TARGET_ABI_OPEN_VMS
2160 && POINTER_SIZE == 64
2161 && Nkind (gnat_node) == N_Allocator
2162 && (UI_To_Int (Esize (Etype (gnat_node))) == 32
2163 || Convention (Etype (gnat_node)) == Convention_C))
2164 malloc_ptr = build_call_n_expr (malloc32_decl, 1, size_to_malloc);
2165 else
2166 malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2167
2168 if (aligning_type)
2169 {
2170 /* Latch malloc's return value and get a pointer to the aligning field
2171 first. */
2172 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2173
2174 tree aligning_record_addr
2175 = convert (build_pointer_type (aligning_type), storage_ptr);
2176
2177 tree aligning_record
2178 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2179
2180 tree aligning_field
2181 = build_component_ref (aligning_record, NULL_TREE,
2182 TYPE_FIELDS (aligning_type), false);
2183
2184 tree aligning_field_addr
2185 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2186
2187 /* Then arrange to store the allocator's return value just in
2188 front of it and return the field's address. */
2189 tree storage_ptr_slot_addr
2190 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
2191 convert (ptr_void_type_node, aligning_field_addr),
2192 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2193 / BITS_PER_UNIT));
2194
2195 tree storage_ptr_slot
2196 = build_unary_op (INDIRECT_REF, NULL_TREE,
2197 convert (build_pointer_type (ptr_void_type_node),
2198 storage_ptr_slot_addr));
2199
2200 return
2201 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2202 build_binary_op (INIT_EXPR, NULL_TREE,
2203 storage_ptr_slot, storage_ptr),
2204 aligning_field_addr);
2205 }
2206 else
2207 return malloc_ptr;
2208 }
2209
2210 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2211 designated by DATA_PTR using the __gnat_free entry point. */
2212
2213 static inline tree
2214 maybe_wrap_free (tree data_ptr, tree data_type)
2215 {
2216 /* In the regular alignment case, we pass the data pointer straight to free.
2217 In the superaligned case, we need to retrieve the initial allocator
2218 return value, stored in front of the data block at allocation time. */
2219
2220 unsigned int data_align = TYPE_ALIGN (data_type);
2221 unsigned int system_allocator_alignment
2222 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2223
2224 tree free_ptr;
2225
2226 if (data_align > system_allocator_alignment)
2227 {
2228 /* DATA_FRONT_PTR (void *)
2229 = (void *)DATA_PTR - sizeof (void *) */
2230 tree data_front_ptr
2231 = build_binary_op
2232 (POINTER_PLUS_EXPR, ptr_void_type_node,
2233 convert (ptr_void_type_node, data_ptr),
2234 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2235
2236 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2237 free_ptr
2238 = build_unary_op
2239 (INDIRECT_REF, NULL_TREE,
2240 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2241 }
2242 else
2243 free_ptr = data_ptr;
2244
2245 return build_call_n_expr (free_decl, 1, free_ptr);
2246 }
2247
2248 /* Build a GCC tree to call an allocation or deallocation function.
2249 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2250 generate an allocator.
2251
2252 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2253 object type, used to determine the to-be-honored address alignment.
2254 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2255 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2256 to provide an error location for restriction violation messages. */
2257
2258 tree
2259 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2260 Entity_Id gnat_proc, Entity_Id gnat_pool,
2261 Node_Id gnat_node)
2262 {
2263 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2264
2265 /* Is there an explicit procedure to call? It is assumed to deal
2266 with the type alignment constraints. */
2267 if (Present (gnat_proc))
2268 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2269 gnat_proc, gnat_pool);
2270
2271 /* Otherwise, "free" or "malloc" the object, with possible special
2272 processing for alignments stricter than what the default allocator honors. */
2273 else if (gnu_obj)
2274 return maybe_wrap_free (gnu_obj, gnu_type);
2275 else
2276 {
2277 /* Assert that we can no longer be called with this special pool. */
2278 gcc_assert (gnat_pool != -1);
2279
2280 /* Check that we aren't violating the associated restriction. */
2281 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2282 Check_No_Implicit_Heap_Alloc (gnat_node);
2283
2284 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2285 }
2286 }
2287 \f
2288 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2289 initial value is INIT, if INIT is nonzero. Convert the expression to
2290 RESULT_TYPE, which must be some pointer type, and return the result.
2291
2292 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2293 the storage pool to use. GNAT_NODE is used to provide an error
2294 location for restriction violation messages. If IGNORE_INIT_TYPE is
2295 true, ignore the type of INIT for the purpose of determining the size;
2296 this will cause the maximum size to be allocated if TYPE is of
2297 self-referential size. */
2298
2299 tree
2300 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2301 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2302 {
2303 tree size, storage, storage_deref, storage_init;
2304
2305 /* If the initializer is present and is a NULL_EXPR, just return a new one. */
2306 if (init && TREE_CODE (init) == NULL_EXPR)
2307 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2308
2309 /* If the initializer is present and is a COND_EXPR, deal with each branch. */
2310 else if (init && TREE_CODE (init) == COND_EXPR)
2311 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2312 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2313 gnat_proc, gnat_pool, gnat_node,
2314 ignore_init_type),
2315 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2316 gnat_proc, gnat_pool, gnat_node,
2317 ignore_init_type));
2318
2319 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2320 sizes of the object and its template. Allocate the whole thing and
2321 fill in the parts that are known. */
2322 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2323 {
2324 tree storage_type
2325 = build_unc_object_type_from_ptr (result_type, type,
2326 get_identifier ("ALLOC"), false);
2327 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2328 tree storage_ptr_type = build_pointer_type (storage_type);
2329
2330 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2331 init);
2332
2333 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2334 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2335 size = size_int (-1);
2336
2337 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2338 gnat_proc, gnat_pool, gnat_node);
2339 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2340 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2341 TREE_THIS_NOTRAP (storage_deref) = 1;
2342
2343 /* If there is an initializing expression, then make a constructor for
2344 the entire object including the bounds and copy it into the object.
2345 If there is no initializing expression, just set the bounds. */
2346 if (init)
2347 {
2348 vec<constructor_elt, va_gc> *v;
2349 vec_alloc (v, 2);
2350
2351 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2352 build_template (template_type, type, init));
2353 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2354 init);
2355 storage_init
2356 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2357 gnat_build_constructor (storage_type, v));
2358 }
2359 else
2360 storage_init
2361 = build_binary_op (INIT_EXPR, NULL_TREE,
2362 build_component_ref (storage_deref, NULL_TREE,
2363 TYPE_FIELDS (storage_type),
2364 false),
2365 build_template (template_type, type, NULL_TREE));
2366
2367 return build2 (COMPOUND_EXPR, result_type,
2368 storage_init, convert (result_type, storage));
2369 }
2370
2371 size = TYPE_SIZE_UNIT (type);
2372
2373 /* If we have an initializing expression, see if its size is simpler
2374 than the size from the type. */
2375 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2376 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2377 || CONTAINS_PLACEHOLDER_P (size)))
2378 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2379
2380 /* If the size is still self-referential, reference the initializing
2381 expression, if it is present. If not, this must have been a
2382 call to allocate a library-level object, in which case we use
2383 the maximum size. */
2384 if (CONTAINS_PLACEHOLDER_P (size))
2385 {
2386 if (!ignore_init_type && init)
2387 size = substitute_placeholder_in_expr (size, init);
2388 else
2389 size = max_size (size, true);
2390 }
2391
2392 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2393 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2394 size = size_int (-1);
2395
2396 storage = convert (result_type,
2397 build_call_alloc_dealloc (NULL_TREE, size, type,
2398 gnat_proc, gnat_pool,
2399 gnat_node));
2400
2401 /* If we have an initial value, protect the new address, assign the value
2402 and return the address with a COMPOUND_EXPR. */
2403 if (init)
2404 {
2405 storage = gnat_protect_expr (storage);
2406 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2407 TREE_THIS_NOTRAP (storage_deref) = 1;
2408 storage_init
2409 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2410 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2411 }
2412
2413 return storage;
2414 }
2415 \f
2416 /* Indicate that we need to take the address of T and that it therefore
2417 should not be allocated in a register. Returns true if successful. */
2418
2419 bool
2420 gnat_mark_addressable (tree t)
2421 {
2422 while (true)
2423 switch (TREE_CODE (t))
2424 {
2425 case ADDR_EXPR:
2426 case COMPONENT_REF:
2427 case ARRAY_REF:
2428 case ARRAY_RANGE_REF:
2429 case REALPART_EXPR:
2430 case IMAGPART_EXPR:
2431 case VIEW_CONVERT_EXPR:
2432 case NON_LVALUE_EXPR:
2433 CASE_CONVERT:
2434 t = TREE_OPERAND (t, 0);
2435 break;
2436
2437 case COMPOUND_EXPR:
2438 t = TREE_OPERAND (t, 1);
2439 break;
2440
2441 case CONSTRUCTOR:
2442 TREE_ADDRESSABLE (t) = 1;
2443 return true;
2444
2445 case VAR_DECL:
2446 case PARM_DECL:
2447 case RESULT_DECL:
2448 TREE_ADDRESSABLE (t) = 1;
2449 return true;
2450
2451 case FUNCTION_DECL:
2452 TREE_ADDRESSABLE (t) = 1;
2453 return true;
2454
2455 case CONST_DECL:
2456 return DECL_CONST_CORRESPONDING_VAR (t)
2457 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2458
2459 default:
2460 return true;
2461 }
2462 }
2463 \f
2464 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2465 but we know how to handle our own nodes. */
2466
2467 tree
2468 gnat_save_expr (tree exp)
2469 {
2470 tree type = TREE_TYPE (exp);
2471 enum tree_code code = TREE_CODE (exp);
2472
2473 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2474 return exp;
2475
2476 if (code == UNCONSTRAINED_ARRAY_REF)
2477 {
2478 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2479 TREE_READONLY (t) = TYPE_READONLY (type);
2480 return t;
2481 }
2482
2483 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2484 This may be more efficient, but will also allow us to more easily find
2485 the match for the PLACEHOLDER_EXPR. */
2486 if (code == COMPONENT_REF
2487 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2488 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2489 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2490
2491 return save_expr (exp);
2492 }
2493
2494 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2495 is optimized under the assumption that EXP's value doesn't change before
2496 its subsequent reuse(s) except through its potential reevaluation. */
2497
2498 tree
2499 gnat_protect_expr (tree exp)
2500 {
2501 tree type = TREE_TYPE (exp);
2502 enum tree_code code = TREE_CODE (exp);
2503
2504 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2505 return exp;
2506
2507 /* If EXP has no side effects, we theoretically don't need to do anything.
2508 However, we may be recursively passed more and more complex expressions
2509 involving checks which will be reused multiple times and eventually be
2510 unshared for gimplification; in order to avoid a complexity explosion
2511 at that point, we protect any expressions more complex than a simple
2512 arithmetic expression. */
2513 if (!TREE_SIDE_EFFECTS (exp))
2514 {
2515 tree inner = skip_simple_arithmetic (exp);
2516 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2517 return exp;
2518 }
2519
2520 /* If this is a conversion, protect what's inside the conversion. */
2521 if (code == NON_LVALUE_EXPR
2522 || CONVERT_EXPR_CODE_P (code)
2523 || code == VIEW_CONVERT_EXPR)
2524 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2525
2526 /* If we're indirectly referencing something, we only need to protect the
2527 address since the data itself can't change in these situations. */
2528 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2529 {
2530 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2531 TREE_READONLY (t) = TYPE_READONLY (type);
2532 return t;
2533 }
2534
2535 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2536 This may be more efficient, but will also allow us to more easily find
2537 the match for the PLACEHOLDER_EXPR. */
2538 if (code == COMPONENT_REF
2539 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2540 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2541 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2542
2543 /* If this is a fat pointer or something that can be placed in a register,
2544 just make a SAVE_EXPR. Likewise for a CALL_EXPR as large objects are
2545 returned via invisible reference in most ABIs so the temporary will
2546 directly be filled by the callee. */
2547 if (TYPE_IS_FAT_POINTER_P (type)
2548 || TYPE_MODE (type) != BLKmode
2549 || code == CALL_EXPR)
2550 return save_expr (exp);
2551
2552 /* Otherwise, take the address, protect it and dereference the result. */
2553 return
2554 build_unary_op (INDIRECT_REF, type,
2555 save_expr (build_unary_op (ADDR_EXPR,
2556 build_reference_type (type),
2557 exp)));
2558 }
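/* A standalone sketch (not part of this file's build) of the hazard
   that this kind of protection guards against, in plain C terms: an
   expression with side effects must be evaluated once into a temporary
   before being reused, which is what the SAVE_EXPRs built above arrange
   at tree level.  */
#if 0
#include <stdio.h>

static int calls;

static int
get (void)
{
  return ++calls;	/* Side effect: visible evaluation count.  */
}

#define DOUBLE_UNSAFE(x) ((x) + (x))	/* Re-evaluates its argument.  */

int
main (void)
{
  int saved;

  /* Without protection, the expression is evaluated twice.  */
  printf ("%d\n", DOUBLE_UNSAFE (get ()));	/* prints 3 (1 + 2) */

  /* With protection, it is evaluated once and the value is reused.  */
  saved = get ();
  printf ("%d\n", saved + saved);	/* prints 6 (3 + 3) */
  return 0;
}
#endif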
2559
2560 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2561 argument to force evaluation of everything. */
2562
2563 static tree
2564 gnat_stabilize_reference_1 (tree e, bool force)
2565 {
2566 enum tree_code code = TREE_CODE (e);
2567 tree type = TREE_TYPE (e);
2568 tree result;
2569
2570 /* We cannot ignore const expressions because they might be references
2571 to a const array whose index contains side effects. But we can
2572 ignore things that are actually constant or that have already been
2573 handled by this function. */
2574 if (TREE_CONSTANT (e) || code == SAVE_EXPR)
2575 return e;
2576
2577 switch (TREE_CODE_CLASS (code))
2578 {
2579 case tcc_exceptional:
2580 case tcc_declaration:
2581 case tcc_comparison:
2582 case tcc_expression:
2583 case tcc_reference:
2584 case tcc_vl_exp:
2585 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2586 fat pointer. This may be more efficient, but will also allow
2587 us to more easily find the match for the PLACEHOLDER_EXPR. */
2588 if (code == COMPONENT_REF
2589 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2590 result
2591 = build3 (code, type,
2592 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2593 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2594 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2595 so that it will only be evaluated once. */
2596 /* The tcc_reference and tcc_comparison classes could be handled as
2597 below, but it is generally faster to only evaluate them once. */
2598 else if (TREE_SIDE_EFFECTS (e) || force)
2599 return save_expr (e);
2600 else
2601 return e;
2602 break;
2603
2604 case tcc_binary:
2605 /* Recursively stabilize each operand. */
2606 result
2607 = build2 (code, type,
2608 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2609 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
2610 break;
2611
2612 case tcc_unary:
2613 /* Recursively stabilize each operand. */
2614 result
2615 = build1 (code, type,
2616 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
2617 break;
2618
2619 default:
2620 gcc_unreachable ();
2621 }
2622
2623 /* See similar handling in gnat_stabilize_reference. */
2624 TREE_READONLY (result) = TREE_READONLY (e);
2625 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2626 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2627
2628 if (code == INDIRECT_REF
2629 || code == UNCONSTRAINED_ARRAY_REF
2630 || code == ARRAY_REF
2631 || code == ARRAY_RANGE_REF)
2632 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
2633
2634 return result;
2635 }
2636
2637 /* This is equivalent to stabilize_reference in tree.c but we know how to
2638 handle our own nodes and we take extra arguments. FORCE says whether to
2639 force evaluation of everything. We set SUCCESS to true unless we walk
2640 through something we don't know how to stabilize. */
2641
2642 tree
2643 gnat_stabilize_reference (tree ref, bool force, bool *success)
2644 {
2645 tree type = TREE_TYPE (ref);
2646 enum tree_code code = TREE_CODE (ref);
2647 tree result;
2648
2649 /* Assume we'll succeed unless proven otherwise. */
2650 if (success)
2651 *success = true;
2652
2653 switch (code)
2654 {
2655 case CONST_DECL:
2656 case VAR_DECL:
2657 case PARM_DECL:
2658 case RESULT_DECL:
2659 /* No action is needed in this case. */
2660 return ref;
2661
2662 case ADDR_EXPR:
2663 CASE_CONVERT:
2664 case FLOAT_EXPR:
2665 case FIX_TRUNC_EXPR:
2666 case VIEW_CONVERT_EXPR:
2667 result
2668 = build1 (code, type,
2669 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2670 success));
2671 break;
2672
2673 case INDIRECT_REF:
2674 case UNCONSTRAINED_ARRAY_REF:
2675 result = build1 (code, type,
2676 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
2677 force));
2678 break;
2679
2680 case COMPONENT_REF:
2681 result = build3 (COMPONENT_REF, type,
2682 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2683 success),
2684 TREE_OPERAND (ref, 1), NULL_TREE);
2685 break;
2686
2687 case BIT_FIELD_REF:
2688 result = build3 (BIT_FIELD_REF, type,
2689 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2690 success),
2691 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2692 break;
2693
2694 case ARRAY_REF:
2695 case ARRAY_RANGE_REF:
2696 result = build4 (code, type,
2697 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2698 success),
2699 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2700 force),
2701 NULL_TREE, NULL_TREE);
2702 break;
2703
2704 case CALL_EXPR:
2705 result = gnat_stabilize_reference_1 (ref, force);
2706 break;
2707
2708 case COMPOUND_EXPR:
2709 result = build2 (COMPOUND_EXPR, type,
2710 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2711 success),
2712 gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
2713 success));
2714 break;
2715
2716 case CONSTRUCTOR:
2717 /* Constructors with 1 element are used extensively to formally
2718 convert objects to special wrapping types. */
2719 if (TREE_CODE (type) == RECORD_TYPE
2720 && vec_safe_length (CONSTRUCTOR_ELTS (ref)) == 1)
2721 {
2722 tree index = (*CONSTRUCTOR_ELTS (ref))[0].index;
2723 tree value = (*CONSTRUCTOR_ELTS (ref))[0].value;
2724 result
2725 = build_constructor_single (type, index,
2726 gnat_stabilize_reference_1 (value,
2727 force));
2728 }
2729 else
2730 {
2731 if (success)
2732 *success = false;
2733 return ref;
2734 }
2735 break;
2736
2737 case ERROR_MARK:
2738 ref = error_mark_node;
2739
2740 /* ... fall through to failure ... */
2741
2742 /* If arg isn't a kind of lvalue we recognize, make no change.
2743 Caller should recognize the error for an invalid lvalue. */
2744 default:
2745 if (success)
2746 *success = false;
2747 return ref;
2748 }
2749
2750 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2751 may not be sustained across some paths, such as the way via build1 for
2752 INDIRECT_REF. We reset those flags here in the general case, which is
2753 consistent with the GCC version of this routine.
2754
2755 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2756 paths introduce side-effects where there was none initially (e.g. if a
2757 SAVE_EXPR is built) and we also want to keep track of that. */
2758 TREE_READONLY (result) = TREE_READONLY (ref);
2759 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2760 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2761
2762 if (code == INDIRECT_REF
2763 || code == UNCONSTRAINED_ARRAY_REF
2764 || code == ARRAY_REF
2765 || code == ARRAY_RANGE_REF)
2766 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2767
2768 return result;
2769 }
2770
2771 /* If EXPR is an expression that is invariant in the current function, in the
2772 sense that it can be evaluated anywhere in the function and any number of
2773 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2774
2775 tree
2776 gnat_invariant_expr (tree expr)
2777 {
2778 tree type = TREE_TYPE (expr), t;
2779
2780 expr = remove_conversions (expr, false);
2781
2782 while ((TREE_CODE (expr) == CONST_DECL
2783 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2784 && decl_function_context (expr) == current_function_decl
2785 && DECL_INITIAL (expr))
2786 expr = remove_conversions (DECL_INITIAL (expr), false);
2787
2788 if (TREE_CONSTANT (expr))
2789 return fold_convert (type, expr);
2790
2791 t = expr;
2792
2793 while (true)
2794 {
2795 switch (TREE_CODE (t))
2796 {
2797 case COMPONENT_REF:
2798 if (TREE_OPERAND (t, 2) != NULL_TREE)
2799 return NULL_TREE;
2800 break;
2801
2802 case ARRAY_REF:
2803 case ARRAY_RANGE_REF:
2804 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2805 || TREE_OPERAND (t, 2) != NULL_TREE
2806 || TREE_OPERAND (t, 3) != NULL_TREE)
2807 return NULL_TREE;
2808 break;
2809
2810 case BIT_FIELD_REF:
2811 case VIEW_CONVERT_EXPR:
2812 case REALPART_EXPR:
2813 case IMAGPART_EXPR:
2814 break;
2815
2816 case INDIRECT_REF:
2817 if (!TREE_READONLY (t)
2818 || TREE_SIDE_EFFECTS (t)
2819 || !TREE_THIS_NOTRAP (t))
2820 return NULL_TREE;
2821 break;
2822
2823 default:
2824 goto object;
2825 }
2826
2827 t = TREE_OPERAND (t, 0);
2828 }
2829
2830 object:
2831 if (TREE_SIDE_EFFECTS (t))
2832 return NULL_TREE;
2833
2834 if (TREE_CODE (t) == CONST_DECL
2835 && (DECL_EXTERNAL (t)
2836 || decl_function_context (t) != current_function_decl))
2837 return fold_convert (type, expr);
2838
2839 if (!TREE_READONLY (t))
2840 return NULL_TREE;
2841
2842 if (TREE_CODE (t) == CONSTRUCTOR || TREE_CODE (t) == PARM_DECL)
2843 return fold_convert (type, expr);
2844
2845 if (TREE_CODE (t) == VAR_DECL
2846 && (DECL_EXTERNAL (t)
2847 || decl_function_context (t) != current_function_decl))
2848 return fold_convert (type, expr);
2849
2850 return NULL_TREE;
2851 }